pax_global_header00006660000000000000000000000064147625556630014535gustar00rootroot0000000000000052 comment=859e154bbc62b745dd137c33bc0ac35b0465e75b pontos-25.3.2/000077500000000000000000000000001476255566300131505ustar00rootroot00000000000000pontos-25.3.2/.github/000077500000000000000000000000001476255566300145105ustar00rootroot00000000000000pontos-25.3.2/.github/CODEOWNERS000066400000000000000000000000701476255566300161000ustar00rootroot00000000000000# default reviewers * @greenbone/devops pontos-25.3.2/.github/dependabot.yml000066400000000000000000000010411476255566300173340ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: pip directory: "/" schedule: interval: weekly time: "04:00" open-pull-requests-limit: 10 allow: - dependency-type: direct - dependency-type: indirect commit-message: prefix: "Deps" groups: python-packages: patterns: - "*" - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" commit-message: prefix: "Deps" groups: actions: patterns: - "*" pontos-25.3.2/.github/workflows/000077500000000000000000000000001476255566300165455ustar00rootroot00000000000000pontos-25.3.2/.github/workflows/auto-merge.yml000066400000000000000000000003351476255566300213360ustar00rootroot00000000000000name: Auto-merge squash on: pull_request_target permissions: contents: write pull-requests: write jobs: auto-merge: uses: greenbone/workflows/.github/workflows/auto-merge.yml@main secrets: inherit pontos-25.3.2/.github/workflows/ci-python.yml000066400000000000000000000045071476255566300212100ustar00rootroot00000000000000name: Build and test on: push: branches: [main] pull_request: branches: [main] permissions: contents: read jobs: linting: name: Linting runs-on: "ubuntu-latest" strategy: matrix: python-version: - "3.9" - "3.10" - "3.11" - "3.12" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install and check with black, pylint and pontos.version uses: greenbone/actions/lint-python@v3 
with: packages: pontos tests python-version: ${{ matrix.python-version }} linter: ruff check test: name: Run all tests runs-on: "ubuntu-latest" strategy: matrix: python-version: - "3.9" - "3.10" - "3.11" - "3.12" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install python, poetry and dependencies uses: greenbone/actions/poetry@v3 with: python-version: ${{ matrix.python-version }} - name: Run unit tests run: poetry run python -m unittest -v mypy: name: Check type hints runs-on: "ubuntu-latest" strategy: matrix: python-version: - "3.9" - "3.10" - "3.11" - "3.12" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Run mypy uses: greenbone/actions/mypy-python@v3 with: python-version: ${{ matrix.python-version }} codecov: name: Upload coverage to codecov.io needs: test runs-on: "ubuntu-latest" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install and calculate and upload coverage to codecov.io uses: greenbone/actions/coverage-python@v3 with: python-version: "3.10" token: ${{ secrets.CODECOV_TOKEN }} check-version: name: Check versioning for consistency runs-on: "ubuntu-latest" steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install python, poetry and dependencies uses: greenbone/actions/poetry@v3 - name: Check version run: | poetry run pontos-version verify current pontos-25.3.2/.github/workflows/codeql-analysis-python.yml000066400000000000000000000024721476255566300237040ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. 
Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. # name: "CodeQL" on: push: branches: [main] pull_request: branches: [main] paths-ignore: - "**/*.md" - "**/*.txt" schedule: - cron: "30 5 * * 0" # 5:30h on Sundays permissions: contents: read jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: ["python"] steps: - name: Checkout repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4 - name: Initialize CodeQL uses: github/codeql-action/init@0e346f2c4a1b999b44f1ef93fe08bdb83dae63ab #v3 with: languages: ${{ matrix.language }} - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@0e346f2c4a1b999b44f1ef93fe08bdb83dae63ab #v3 pontos-25.3.2/.github/workflows/conventional-commits.yml000066400000000000000000000004631476255566300234430ustar00rootroot00000000000000name: Conventional Commits on: pull_request_target: permissions: pull-requests: write contents: read jobs: conventional-commits: name: Conventional Commits runs-on: ubuntu-latest steps: - name: Report Conventional Commits uses: greenbone/actions/conventional-commits@v3 pontos-25.3.2/.github/workflows/dependency-review.yml000066400000000000000000000003451476255566300227070ustar00rootroot00000000000000name: 'Dependency Review' on: [pull_request] permissions: contents: read jobs: dependency-review: runs-on: ubuntu-latest steps: - name: 'Dependency Review' uses: greenbone/actions/dependency-review@v3 pontos-25.3.2/.github/workflows/deploy-pypi.yml000066400000000000000000000012371476255566300215460ustar00rootroot00000000000000name: Deploy on PyPI on: release: types: [created] permissions: contents: read jobs: deploy: runs-on: ubuntu-latest environment: name: pypi url: https://pypi.org/project/pontos/ permissions: id-token: write steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 
- name: Set up Python uses: greenbone/actions/poetry@v3 with: python-version: "3.10" install-dependencies: "false" - name: Build run: | poetry build - name: Publish uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc #v1.12.4 pontos-25.3.2/.github/workflows/github-pages.yml000066400000000000000000000023151476255566300216500ustar00rootroot00000000000000name: Deploy docs to GitHub Pages on: # Runs on pushes targeting the default branch push: branches: [main] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages permissions: contents: read pages: write id-token: write # Allow one concurrent deployment concurrency: group: "pages" cancel-in-progress: true jobs: deploy: environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2 - name: Install poetry and dependencies uses: greenbone/actions/poetry@v3 with: python-version: "3.10" - name: Build Documentation run: | cd docs && poetry run make html - name: Upload artifact uses: actions/upload-pages-artifact@56afc609e74202658d3ffba0e8f6dda462b719fa #v3.0.1 with: path: "docs/_build/html" - name: Deploy to GitHub Pages id: deployment uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e #v4.0.5 pontos-25.3.2/.github/workflows/release.yml000066400000000000000000000056141476255566300207160ustar00rootroot00000000000000name: Release on: pull_request: types: [closed] workflow_dispatch: permissions: contents: read jobs: release: name: Pontos # If the event is a workflow_dispatch or the label 'make release' is set and PR is closed because of a merge if: (github.event_name == 'workflow_dispatch') || (contains( github.event.pull_request.labels.*.name, 'make release') && github.event.pull_request.merged == true) runs-on: "ubuntu-latest" steps: - name: Setting the 
Reference id: release run: | if [[ "${{ github.event_name }}" = "workflow_dispatch" ]]; then echo "ref=${{ github.ref_name }}" >> $GITHUB_OUTPUT else echo "ref=${{ github.base_ref }}" >> $GITHUB_OUTPUT fi - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4 with: fetch-depth: 0 persist-credentials: false ref: ${{ steps.release.outputs.ref }} - name: Set git name, mail and origin run: | git config --global user.name "${{ secrets.GREENBONE_BOT }}" git config --global user.email "${{ secrets.GREENBONE_BOT_MAIL }}" git remote set-url origin https://${{ secrets.GREENBONE_BOT_TOKEN }}@github.com/${{ github.repository }} - uses: greenbone/actions/poetry@v3 - name: Allow admin users bypassing protection on ${{ steps.release.outputs.ref }} branch run: | poetry run pontos-github-script pontos.github.scripts.enforce-admins ${{ github.repository }} ${{ steps.release.outputs.ref }} --allow env: GITHUB_USER: ${{ secrets.GREENBONE_BOT }} GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }} - name: Create release run: | poetry run pontos-release create --repository ${{ github.repository }} --release-type calendar env: GITHUB_USER: ${{ secrets.GREENBONE_BOT }} GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }} - name: Disable bypassing protection on ${{ steps.release.outputs.ref }} branch for admin users run: | poetry run pontos-github-script pontos.github.scripts.enforce-admins ${{ github.repository }} ${{ steps.release.outputs.ref }} --no-allow env: GITHUB_USER: ${{ secrets.GREENBONE_BOT }} GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }} - name: Import gpg key from secrets run: | echo -e "${{ secrets.GPG_KEY }}" >> tmp.file gpg --pinentry-mode loopback --passphrase ${{ secrets.GPG_PASSPHRASE }} --import tmp.file rm tmp.file - name: Sign assets for released version run: | poetry run pontos-release sign --repository ${{ github.repository }} --signing-key ${{ secrets.GPG_FINGERPRINT }} --passphrase ${{ secrets.GPG_PASSPHRASE }} env: GITHUB_USER: ${{ secrets.GREENBONE_BOT 
}} GITHUB_TOKEN: ${{ secrets.GREENBONE_BOT_TOKEN }} pontos-25.3.2/.github/workflows/sbom-upload.yml000066400000000000000000000004551476255566300215160ustar00rootroot00000000000000name: SBOM upload on: workflow_dispatch: push: branches: ["main"] permissions: contents: read jobs: SBOM-upload: runs-on: ubuntu-latest permissions: id-token: write contents: write steps: - name: 'SBOM upload' uses: greenbone/actions/sbom-upload@v3 pontos-25.3.2/.github/workflows/scorecard.yml000066400000000000000000000060151476255566300212370ustar00rootroot00000000000000# This workflow uses actions that are not certified by GitHub. They are provided # by a third-party and are governed by separate terms of service, privacy # policy, and support documentation. name: Scorecard supply-chain security on: # For Branch-Protection check. Only the default branch is supported. See # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection branch_protection_rule: # To guarantee Maintained check is occasionally updated. See # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained schedule: - cron: '31 19 * * 6' push: branches: [ "main" ] # Declare default permissions as read only. permissions: read-all jobs: analysis: name: Scorecard analysis runs-on: ubuntu-latest permissions: # Needed to upload the results to code-scanning dashboard. security-events: write # Needed to publish results and get a badge (see publish_results below). id-token: write # Uncomment the permissions below if installing in a private repository. # contents: read # actions: read steps: - name: "Checkout code" uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Run analysis" uses: ossf/scorecard-action@f49aabe0b5af0936a0987cfb85d86b75731b0186 # v2.4.1 with: results_file: results.sarif results_format: sarif # (Optional) "write" PAT token. 
Uncomment the `repo_token` line below if: # - you want to enable the Branch-Protection check on a *public* repository, or # - you are installing Scorecard on a *private* repository # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional. # repo_token: ${{ secrets.SCORECARD_TOKEN }} # Public repositories: # - Publish results to OpenSSF REST API for easy access by consumers # - Allows the repository to include the Scorecard badge. # - See https://github.com/ossf/scorecard-action#publishing-results. # For private repositories `publish_results` will always be set to `false`, regardless of the value entered here. publish_results: true # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: SARIF file path: results.sarif retention-days: 5 # Upload the results to GitHub's code scanning dashboard (optional). # Commenting out will disable upload of results to your repo's Code Scanning dashboard - name: "Upload to code-scanning" uses: github/codeql-action/upload-sarif@afb54ba388a7dca6ecae48f608c4ff05ff4cc77a #v3 with: sarif_file: results.sarif pontos-25.3.2/.github/workflows/update-header.yml000066400000000000000000000006651476255566300220070ustar00rootroot00000000000000name: Update Headers on: workflow_dispatch: schedule: - cron: "0 0 1 1 *" # At 00:00 on day-of-month 1 in January. 
permissions: contents: read jobs: update-header: name: Update headers runs-on: "ubuntu-latest" permissions: contents: write steps: - name: Run update header uses: greenbone/actions/update-header@v3 with: directories: pontos tests target: main pontos-25.3.2/.gitignore000066400000000000000000000024221476255566300151400ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ # ruff .ruff_cache/ # vscode settings .vscode # vim settings in virtualenv .vim process.log pontos-25.3.2/.pontos-header-ignore000066400000000000000000000000341476255566300171770ustar00rootroot00000000000000*/templates/* __version__.pypontos-25.3.2/LICENSE000066400000000000000000001045151476255566300141630ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. 
Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. 
For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. 
To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. 
You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. 
d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. 
A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. 
If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: Copyright (C) This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. 
The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . pontos-25.3.2/README.md000066400000000000000000000114011476255566300144240ustar00rootroot00000000000000![Greenbone Logo](https://www.greenbone.net/wp-content/uploads/gb_new-logo_horizontal_rgb_small.png) # Pontos - Greenbone Python Utilities and Tools [![GitHub releases](https://img.shields.io/github/release/greenbone/pontos.svg)](https://github.com/greenbone/pontos/releases) [![PyPI release](https://img.shields.io/pypi/v/pontos.svg)](https://pypi.org/project/pontos/) [![code test coverage](https://codecov.io/gh/greenbone/pontos/branch/main/graph/badge.svg)](https://codecov.io/gh/greenbone/pontos) [![Build and test](https://github.com/greenbone/pontos/actions/workflows/ci-python.yml/badge.svg)](https://github.com/greenbone/pontos/actions/workflows/ci-python.yml) The **pontos** Python package is a collection of utilities, tools, classes and functions maintained by [Greenbone]. Pontos is the German name of the Greek titan [Pontus](https://en.wikipedia.org/wiki/Pontus_(mythology)), the titan of the sea. 
## Table of Contents - [Documentation](#documentation) - [Installation](#installation) - [Requirements](#requirements) - [Install using pipx](#install-using-pipx) - [Install using pip](#install-using-pip) - [Install using poetry](#install-using-poetry) - [Command Completion](#command-completion) - [Setup for bash](#setup-for-bash) - [Setup for zsh](#setup-for-zsh) - [Development](#development) - [Maintainer](#maintainer) - [Contributing](#contributing) - [License](#license) ## Documentation The documentation for pontos can be found at https://greenbone.github.io/pontos/. Please refer to the documentation for more details as this README just gives a short overview. ## Installation ### Requirements Python 3.9 and later is supported. ### Install using pipx You can install the latest stable release of **pontos** from the Python Package Index (pypi) using [pipx] python3 -m pipx install pontos ### Install using pip > [!NOTE] > The `pip install` command does no longer work out-of-the-box in newer > distributions like Ubuntu 23.04 because of [PEP 668](https://peps.python.org/pep-0668). > Please use the [installation via pipx](#install-using-pipx) instead. You can install the latest stable release of **pontos** from the Python Package Index (pypi) using [pip] python3 -m pip install --user pontos ### Install using poetry Because **pontos** is a Python library you most likely need a tool to handle Python package dependencies and Python environments. Therefore we strongly recommend using [poetry]. You can install the latest stable release of **pontos** and add it as a dependency for your current project using [poetry] poetry add pontos ## Command Completion `pontos` comes with support for command line completion in bash and zsh. All pontos CLI commands support shell completion. As examples the following sections explain how to set up the completion for `pontos-release` with bash and zsh. 
### Setup for bash ```bash echo "source ~/.pontos-release-complete.bash" >> ~/.bashrc pontos-release --print-completion bash > ~/.pontos-release-complete.bash ``` Alternatively, you can use the result of the completion command directly with the eval function of your bash shell: ```bash eval "$(pontos-release --print-completion bash)" ``` ### Setup for zsh ```zsh echo 'fpath=("$HOME/.zsh.d" $fpath)' >> ~/.zsh mkdir -p ~/.zsh.d/ pontos-release --print-completion zsh > ~/.zsh.d/_pontos_release ``` Alternatively, you can use the result of the completion command directly with the eval function of your zsh shell: ```bash eval "$(pontos-release --print-completion zsh)" ``` ## Development **pontos** uses [poetry] for its own dependency management and build process. First install poetry via [pipx] python3 -m pipx install poetry Afterwards run poetry install in the checkout directory of **pontos** (the directory containing the `pyproject.toml` file) to install all dependencies including the packages only required for development. Afterwards activate the git hooks for auto-formatting and linting via [autohooks]. poetry run autohooks activate Validate the activated git hooks by running poetry run autohooks check ## Maintainer This project is maintained by [Greenbone AG][Greenbone] ## Contributing Your contributions are highly appreciated. Please [create a pull request](https://github.com/greenbone/pontos/pulls) on GitHub. Bigger changes need to be discussed with the development team via the [issues section at GitHub](https://github.com/greenbone/pontos/issues) first. ## License Copyright (C) 2020-2024 [Greenbone AG][Greenbone] Licensed under the [GNU General Public License v3.0 or later](LICENSE). 
[Greenbone]: https://www.greenbone.net/ [poetry]: https://python-poetry.org/ [pip]: https://pip.pypa.io/ [pipx]: https://pypa.github.io/pipx/ [autohooks]: https://github.com/greenbone/autohooks pontos-25.3.2/docs/000077500000000000000000000000001476255566300141005ustar00rootroot00000000000000pontos-25.3.2/docs/Makefile000066400000000000000000000013201476255566300155340ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help livehtml Makefile livehtml: sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
%: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) pontos-25.3.2/docs/_static/000077500000000000000000000000001476255566300155265ustar00rootroot00000000000000pontos-25.3.2/docs/_static/custom.css000066400000000000000000000107101476255566300175510ustar00rootroot00000000000000div.document { width: 1200px; } body { font-family: Helvetica, Verdana; } div.body { max-width: 1200px; } h1, h2, h3, h4, h5, h6 { font-weight:500; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { font-family: Helvetica, Verdana; } .sphinx-tabs-tab { color:var(--color-content-foreground); font-family: Helvetica, Verdana; } a { color: var(--color-content-foreground); text-decoration: none; border-bottom: 1px dotted var(--color-content-foreground); } a:hover { color: #11AB51; border-bottom: 1px solid var(--color-content-foreground); } .related-pages a .page-info .title, .bottom-of-page a { color: var(--color-foreground-secondary); text-decoration: none; border-bottom: 1px dotted var(--color-foreground-secondary); } .related-pages a:hover .page-info .title, .bottom-of-page a:hover { text-decoration: none; color: #11AB51; border-bottom: 1px solid var(--color-foreground-secondary); } a.muted-link { border-bottom:0; } a.muted-link:hover { border-bottom:0; color:#11AB51; } a.sidebar-brand, .toctree-l1 > a.reference, .toc-tree a.reference { border-bottom: 0; } .toctree-l1 > a.reference:hover, .toc-tree a.reference:hover { border-bottom: 0; color: #11AB51; } .related-pages a { border-bottom:0; } .related-pages a:hover { border-bottom:0; } .toctree-wrapper .caption-text, .sidebar-tree .caption-text { font-size:2em; font-weight:500; color: var(--color-content-foreground); } .sidebar-brand-text { text-align:left; } .sidebar-search { color:var(--color-content-foreground); } .sidebar-tree .toctree-l1 > .reference { color: var(--color-content-foreground); } .toc-tree li.scroll-current > .reference { color: var(--color-content-foreground); } 
.toc-tree .reference { color: var(--color-content-foreground); font-size:1.1em; } .toc-title { color: var(--color-content-foreground); font-size:1.2em; } div.admonition { background-color:var(--color-background-secondary); padding: 20px 20px; border-radius: 4px; } .admonition.note > .admonition-title, .admonition.warning > .admonition-title, .admonition.tip > .admonition-title, .admonition.important > .admonition-title, .admonition.hint > .admonition-title { background-color:var(--color-background-secondary); font-family: Helvetica, Verdana; font-weight: bold; font-size:1em; } div.body p.caption { font-size: 240%; margin-bottom: 0px; } div.sphinxsidebar h3 { font-family: Helvetica, Verdana; } div.sphinxsidebar a { border: 0px; } div.sphinxsidebarwrapper p.description { font-size: 15px; } div.sphinxsidebarwrapper h1.logo { font-size: 25px; } div.sphinxsidebarwrapper h1.logo-name { margin-top:50px; } div.sphinxsidebarwrapper p.description { margin-bottom: 50px; } div.sphinxsidebarwrapper img { max-width: 70%; } .highlight-shell-session .go::before { content: ">"; padding-right: 1em; } .highlight pre { border-radius: 6px; font-family: Consolas,Monaco,Andale Mono,Ubuntu Mono,monospace; font-size: 0.9em; line-height: 1.2; padding: 1.25rem 1.5rem; } .code-block-caption { font-size:0.7em; background-color: var(--color-background-secondary); color: var(--color-foreground-primary); } .code-block-caption a { color:var(--color-foreground-primary); } .code-block-caption a:hover { color:#11AB51; } div.edit-this-page > a.muted-link, svg.icon-tabler-shadow, svg.theme-icon-when-dark, svg.theme-icon-when-light { color:var(--color-content-foreground); } div.edit-this-page > a.muted-link:hover, svg.icon.icon-tabler.icon-tabler-copy:hover, svg.icon-tabler-shadow:hover, svg.theme-icon-when-dark:hover, svg.theme-icon-when-light:hover { color:#11AB51; } .highlight { background-color: var(--color-code-background); color:var(--color-code-foreground); } .highlight button.copybtn { 
background-color:transparent; } .highlight button.copybtn:hover { background-color:transparent; } svg.icon.icon-tabler.icon-tabler-copy { color: var(--color-code-foreground); } .highlight .go { color: var(--color-code-foreground); font-weight: normal; } .highlight .l { color: var(--color-code-foreground); } #contents.toc { background-color: #ffffff; border: none; } .back-to-top { background-color:var(--color-background-secondary); } div.topic { border-radius: 4px; } div.topic a.reference.internal { border: 0px; } div.toctree-wrapper.compound a.reference.internal { border: 0px; } #indices-and-tables a.reference.internal { border: 0px; } .sidebar-logo { max-width: 70%; } pontos-25.3.2/docs/_static/greenbone.png000066400000000000000000000575161476255566300202160ustar00rootroot00000000000000‰PNG  IHDRKD!ó_ IDATxœì½¸Gu8~fvo{ý===Iïɲ$I–;î°qÇ1½:àPì$´ =€ Õ¡%@h!L³±±qllã&YͲ,Yý=õ×nÙÝ9¿ofgfÏÌî}²ó_¾ÿ÷±þäwïÝÝ™3gΜ>gàÏן¯?_¾þ|ýùúóõçëÿ·+|ddÄùŽúÁì/Ó¯¢~†Þ6WÁ;s¢(zU¥RþÜèØ.h5›0kÖ,(—ʈc¿DÄÆØÏ8çŸæœC£Ù„=»wÿŸÎÁ¼áyÐQë€8Ž%ŒîMDý[³ÙÄkQCµRF³¡~ëìèd š”Ëeõ›HDqA€«‚´«ç‚ô'Ÿ‘ïÉßäqA”$P BÂÀÞîëßã$Qý˾ä?ù¼yÏÀ.¯0 !Iû›|¶¿¿*•Šjtjj&ÆÇìj,B@©TR0J<ÈÏæ½(ŠTûò7ùwÇÎP­Va @õFJ0²ù]Ò…|F>/aqÓ¾9Aµj…žŠl<²ïV«•⋌Ïâ(›º }5 5Ö€ ! 
Ÿ˜Pc–W©\†þ¾>…'Iòo:ïp¨aÈ1$jNêõºj¯–Ô˜Œò©6‹Š‘94ó¨ùOq"ÛvÆ… Æºwß^ÝK¨‚R)„éFD’@‡¤GêžlCŽA¶aèHŽG¶'/9Ç;wî,„3,úÑ4öŒ8FÛç( žá»éãÆØg“D¼:àü9òWI•rE"-Âà’é}015~úì¹s/€×ÀúgéŸöÂt;óg±ü-^Ð:%&òE£ùH‡i"ÇÐÈKHðï|&ŒFö‡æ3s;D¯ôaóú¤°!Ä #r¦á-†Ù¥¦ÿ³cÔ Õ§53®d„I¤x‡T4š¡8 áÑ´i¦„1 LˆyÜ‹i³¶ gÙ˜È|úófð£o‰Yò`AJˆ=QMûÍ-g–µQ´¢}ÁSt3™(v:x6ƒü::Då±Ï²ì6g,’ܸ1Q?¡§§ûÝ==—ʇ”Àê¦vö/œ;w^ÿaÑÆß­<½oÞìÀ±°ùÙCû¿w!³‹Õ[QäÐßÕ„ËÅï=j%[›¹É¤p¾Jìó3“€fÞôMÌÏKäÌë ˆ-%Ût,H~1Qfús°ä?O‰Ý2\ó jÀ…‡ 9n­2‡9xLׯmÁ ÛW/n.à sÁœÇÀ¡¡W“§Ð‡±ˆ¶Ü+‡w_žyÚœÏ:é°1÷»3™$Îw@ÔÈvŸa&:Ç“÷¥Êao¹³,õHÍø’RGùZ‹/Jõ—qÎ÷Nì_~ì}í@<ïÃSÉS|¢wî¼¹÷uww„RÕ.0ár:¯ß\EÏÑÉÌëÎVå–ªc:>wö™•,ífsŸÑjD•T¬êL¤x‹) c$ÆlHA+Q…Š(5Wòp3wE8¿<Ñæs­[j%ï<éðBÌÔùLAôLBÄj‘V…¾{Ñ~k@…Àfc6 —¶Sô’ù3Þž‰QA†é³ˆyf_¤õPÌp‹æ(…"ZÉ®B&ÓÕÕ5ÃpHÃ7ˆžÕ%Ç–DI’|Ñõ‰;¶tLøB©£r;c|¨)S·£ ŒÕûËÅÓýOŒ5{¦†{‡z¿Çñ5H¤KÑò.dŠÄtÈ«¤žj¬?3¯-åwP‹¨½ê]t¹Û…ÓJ§"d{ZaÑDÉĈf@‡—cÆ1gÒ•åúf2Ș¢U4ÚÓzn y_éØa¦Dêcí'ƒ sÂ2×=sŸÍ5†tl®ùY0 òj{Bh˲¼•í7‘šä¶ÌðTˆnÚ*¾ìðûý™KÚѦ­XßÂËzö'Ù‡M/ê$.bVt¡ÇÓÍ9agV]ÐÇ–ok47?¹©2Ü=|3KÄ‘Bˆ@Wu¤š N×yé±õß¼ª¢ë­Óõéë°Æù™!ÈíÕUHÈ=$9ÃÏîo¶‘ÀA4‰LÔç†áø0ÈB Ì«pF3IXª%x̽ëXól†Å¤è¡aE¢ûn±¹”Ç)¶óèñú‹ÖÇIWsÉ-Ö"s‰ú¥Ú0¿󹈹;xôiã`Ú^{“.šxc á™ÂbÌ`òpn·Ÿis2™²òú[Ìz.Oê;ªw†Bæhè@¦v{¦çÐÚ?}xX+PÝuùP½ë–Z«¾ojAµ·ã$÷t”;Âêœn± '9d+Îï GîžÑoUàþÒF´ûr2fصa¼([¦væü%†À–Þ÷€8 b1Ó²¦Ë´d:o.*a]Kö³LÆf²°§BeÔwD³g}Øacö…Ø”ŽÄmÓ°b.!ûæ&:•cd..(ó2{„t#™ÁÌWá¼Hh#Ã’~™ÒÄù93c Mµ!PGá̦[yαíÇŸ÷öÁ‘\²à*d2{öìiûÂ3½æÎ›°oï^»‘Jþ•“ÔÕ_…¨™@Ô”›×80¹À™ÑË  ªÃâ@ûáöI\9Á—¿äÌÚÜË-Þ±Möw=W-E¡)Ñ R kœ3ôþ²»ÙšnVƒrp;»Êµž’pº8ËìKN™ œXjÈX/×ÒÑ1‡ ³]¢.ºf4<ç.süD޳Ȳ`6%g±xRÔÝëDÏïÄ4²uÃþÖÆ_ÁJ˜”Ã;òŒ„YXÉ8, Œ£iH¾3L„H0ÛDÜñaÁGº° ´ƒ™ÇÏÀ|Î3Ê‚è!´_Ît¢Ï³ìw ?e¼1— ™ÌÿÊ¥Í ×cÊÄèè-CsRÀôdJ,T>›R9”CÔ·þ¾Uâ(8þ©Ë;º;ªAnþõ Ñ]îàåÃP*¶ÞwÚ/÷›aæ'Á¢¥ó’>}½œqvcË’$Y«Ò®‰4Í9‰h 7ÑÈ–pȾåRèDN¨¤ ©ÈDr¾2â,¥A ³hHhÒõ«Ð½H‚ãÄÎpU›Äeµ.s›ÙEl`HïeÄç˜)j[•œ8¢j(С;&¨´é—oÉÕž\¡Pxͤ=Aæ»",Ë’{l׆Õ³¿ íøV»Í õA¶ê˜{49ÕÝ#—ç è3%Âý͜Оg9ןŽÉ Ò€”$™?F.Ôj9Taà§7nú`wG7žöÎ [ƒçÖ]æ0•ŒÃŽ»Ö5ŸxðñòÇ-ƒc,Á4$ZÖýÄÚfìMì !¬ô×°1ôuV’úžÉï‹Aô˲&‹yôÓö3ÝQà½'MÁ~`e I’s¨æÇîýbL÷•ßÉíNôºÍ¢.`}.¾Igɦ9¹y ò ªá¸YÝŒà,g°¹Ä"Aíq{3ÁÃg¼4ǽ½±»ï+\#Ë÷å1ŸB«…ùpx¯çúöCÓ¢¨Ÿvh[Çj¹ñCžé14 {nŽ Ï£vAÍ㙯?“ÉIBäDê«z­¶nÝúõEGùšeï9/鬬cÍ=ðëÆz8â±-Òà\p<«•òæé°-iA?­SªÄµ %M˜J(­1û%K¢' 
ÿÄm}ýŸ*—Ëï“>aòXrš–Ú-4óÉ3aXÒ>áG:ÐÔ $!zF³]M˜»€1“†n(¤ëæšT[È/̺¶LŘó…!æÜ¤ªrªÅ 5gÐòBü1fžÎL-f&þvæ,N—ÏzQ5m*ކ¹°dŒ lÒd­à™â2Ÿ÷3…ÓÑP L›¶*Ävš μ߳°Ÿb5&‚kKÙ¿ø®Éx¶²¹Ré™B”BØ?:¶°øíã/{îY‡]{.tòJ0Ö:cͧááÖ¾”i¼p¸tì’Ádô¬Rylÿx"ž†€q˜öÍëUÒÔÚaRµˆZ\ˆµ'WÊÇœs²Xy׃ïœ=û_ÇØ wkû TEŸE$1tttA¹\J£L¹ÂN^2·S"ÀªÖ\aN."g¾/¨hŒŠî¹¥²| ¼0 ?ІŠpö1c3ÂkW¬íÐOݸ[ŒÄ÷ûðÙL%”“$I/\ï¾\À-ˆ cExÁŽg?¼ž­I ‹ÛŸÕ(‹ÖKq¢¸Ò$èd¢Ì]õìf ÌŸÂ‹å… õu1R.#÷À íþI™ älÀ,DV×Ú½kì¢ÎRõ§¿ãâÞáKŽX3ñ<ØÜ­âÒ<€>V‚u D á%¨ïUÈídºy‰ŒîH|3€ˆ ]#•áþ&Îå=ìÐ÷œ›ì\±‘ïÚµë#ðjZŸÑ Çé¹÷A!»Zi#RÊÑ*Ð&ôfRÒ¸’0[ôf¡ ‹¶g'Ú#h_½.Rss°“ªyV °Ê•«ºëmK¨É¹H 3Æh¦d+Ñ33È6b˜ÁYá¾Ö£˜Ï4•" Òd/gh`„¹ù™°ÅNÓgdæm]U^¿ŽI¦á´phfFø¸¸pµë<ãi#$6_r:s´fj/âxž`)ºþäL†:*S'VJq[×m¼vhÎÜ/œú¡+¡{É îŠÇÙ½ãa*Ž¡3àЖ”¯¥ CƒB5ž§[–dk¡d.² ;FDL㉠`g‚À«q°ìeÏÅ]_ÿÙ«†‡‡?Oa€¡º!ÖvÎUC(Òéë˜4RÌ.\²¡<¤‘”Iˆœìwh¦ýDδXi6¨ÂÖ0dÌ,¿u€:›)Ðï†Hûí4¬ƒh`PDºÎ†gVÈ P¬ÖjÐi1Å3â%t,ΆEwŒES@}–Á;fo›5B+r`æÌº6ƒ, D`©þL* Ìà€þ“F—ü‰€¨Ñ ×­Øø£N;᪥¹Zl>ð[+£rP›2R3 5r„r«‚rôÁ!Y º}¥U`‚0ùx®û“:›÷òãÅâß­aW=qùÀ¬Y_2È/r”e’¦¸ø‰Q+Êá›Q]ŠT+p!v=dÒ‰2FÕ£5Yà€´GQíe;†sÁä3Ú'ñhMÅ€ätCÇG†¦ðe+¤ÑýW3û´…ÛW"ˆ_¬ˆt {p4«1´'ø¼æÇþ¼÷\áãÜÀìEDÏ™o’Ø<“ÒÕdff($ VAÔ³­9ÆKÝ CÈa,‰`"ž„ÓKjß-F÷¾³ÚÛùo°ÒñèÝáRšËF»È·%?úÃÜZªôv~^qÓHÊL&îæ÷Ù”ŸeyRYÞ³V«e'Fzª¨C$,¬µZ­ž$I~“™ èøÒç žE©™µjêÓõ½†Ôþ©°•J¹ÍLøR3+À•¹\)×nƒ\qó™“ÕÙ]îsFËÀÌæI ŸÙÔ˜©÷Ìj}Å2WËÇ-9@C±FÒû–;”LË¡óÌ3áÒË_s^ºF1èK9*imt¤IöY–DÄìåØ<涉úôFK¢‡,ä”C ºËõ`¨Æê'u³?Ð="šìמìy|+l~xÍ s稉”¾˜H3 F·ô[±‘áGj9ÒoÓÔGtR5º­Í›¶ù¬¦Õú­ÁAsv!À¦Üáêa(ÔP;±ÖtttÔÛæ.xÝûš…‡Ügqíªp‘¢¡L»Æ¼Í“´bn!ùÝצ̻hçžùƒz‹·ÈŠUQxÌôCǾiGýHŒeOXGzϵÌ^ ݈Sž©QU€ÂBÎÜr*ù1g-úJ\ÎçG™6‰€f¯xŒ+§¼¤þG³&Ìwpß2€çǯ¯B&3wΜ”£9‹†ªÆÙg3éWŸœ€ kÖžÙvÜrê篂Á¥óqJ$ìÞ«a,iT–¯aÕ½C_?Ü{ïÒéô}É‹.ƒcOß´ àíG*?L¬Až‹Pã9@£JʼnqñÓÑxò/ëùÊU:t: ½äY%bèÎ9ãðÑã룇lïX_‹l+wtu´Z­¡D$cæìi…/VÒ´šŽÜoÇêìbÔÎb/î4_@GGÇëlýdF]—÷Jâä\A“:üÌÊÑ  cæ$x¹ ÂõŸø;„]'^‘7…údŠBÊ–iÓ"=j*ûhé‚:`uÝ̇|ý˱šLÔбÿ2œä5“6æ,ep?†)™P·ÃpÑÅPq„›úAlc>JÛŽÕµ»ùø_ßÉš¢óæÈqˆ`¬-)u­ IDAT† ©´ÓHé,8o†ëè§V2˜DˆæØ®]gTÊåVoo¯?udã‚bÚ‡'“`”ÀèŠ(ØFž-(ºä²EC*!rWžƒ}&l0óÉÒq5-g{€g.¨¶ÉQ¹E ’t𣠇ñúRžôÁuY[¶€â€Ž›Jˆê üÛÆDt¨_Ä¿Ì|·KHµƒt1 }Ⱦƒ3-ˆ¬6ã™;–mS8Ü ‚¤¿Ü/Äñس×rr|b@ÆØî*d2kV¯nûBÑ%£H=•ΞýSã÷žõž—C§-œÕøú‰ °¡5 ³y;ÃæïX{ë#pÆ)§Â[Þ|MÛö¶îÙìì*¤)µ˜O7?Ê’›R-Q &ÁúÒ/mŽÖܽ¢G‚°ðèÃë‹ú;±?^u|%„”q@Ö:býaÍõXU«E 
øƒ{qBLWfõ À®;D ‚õ ž/Ç ö7„E"\»¼xv\N^dÊ0€Ëœ Q߆a«œæ ‚©üiÜ ¾9ƒ'3ÐÏ€ï!Ç¿Vô+K†2Þ\Š€'€ã‘ÁñZx”yc@€K„GàR@X²–"cŒM!Àvð8< ¿€ipˆcL˜½Dxçüˆ$IŽ€YœK­LDBÀ$c°&à|ö0ü6{Û‘ÌÃzŒæì^Žogœm³Ø.Æ^È9?NZãRó㜠!î€1O©eÄÁ&ÄšX&@q2e\¼®`§!â1À Â•åC‚ ¸²²<È>³‹$NcK8ç‡ëœ(.PL1;à±RXÞÀ¿Iâ43UœÒ$¤Bw:d ƒ X†áƒ ­¤,˜]Äó)8”îÎÁ‡8ç·"à¯\‚òIZ™ï‡–KåçK ˵™Þ šœˆ(£4ñ!¸÷`áùB&#K¸ÁlÁQÓ/u< u@ÿþxü®“_}þ¬Ù/\œ„¬+Ø6½ ®A?”ßD¨÷¦]°ÞñîkÛôßÿþCX;µð‚Ó¡,¸b0†Ô¤-%’̇}l}²öÑu]ó=$êyÁ‚æêKJÀQ®Ô* & 8"Œ–“rÇ1³ðHÂñ$À8$.NŽ=ÿùÑØž:ïÉ{VÜÝ„xë¬þ¾ B"þ;‚Ï8VËü^ú«ÝÙÄ/i³'ÇH©È‰6(ï‡axãÐìÙ¶~ppåLÎ4oŽo7k'8cð%D\fçE$· ÊaXz çì`À´ÁÓˆ‹:ûÆH-!ðXÆà+œóçÊßlB¢WÃüM’d"þ-öE<Ó(UïYfD‹·1Á>—2ýl!3]@Lû‚ÎSaü”èå"{3ç|çôÔ4twuCRã;¾ tm0xücl ‚ë‘üJ©"‡¾¥°#âÇÀ)øh‘×4ó)q^âxuÀùùAÀeŸó ¬GÝô9ê‚â¾€~7dDì±ç[ ÊIÞGUc9ñR=÷¥ñ=:nˆìdœ-3m Ó(p \)ÝŠˆ')<²v#âfÒ…”z÷gÎJÍä9æe4rñÀ÷ÁÜf}^–¼ˆãHm•A€F½þYð¿p5/'·Â¦zæ™V–J#q·¤vY’f©uªMj i¿d¬Œ1©UôúóÀÀÑKð i;Ñã!Uÿ`]_€k¨&Iý/Ä û"n†*ÒÚâúµZíhÎù# €Ê<Î`£í¢Ñ †ÀO€±wpÎÿÙš%Ùüo„a¦u³ÕC!:*•ʯñ,ã8öciUÊ þ~öÐìÛ9ã÷ȲԮı f0`ßÀ×)\˜¨šM®/ÎùUŒó³¢(š_¬ã·a2Ö6ùÞ6sOò¹Í›7¿mþÂo\øwçb…wðɤÉ\DèSÙÚr/’€£o‡Ç›o}g±™´bÕcpÎÑÏ…ñê ^4ºb®ü/BçÂÈ>'ƒ–¯Šš«n´³·Üï¼z¼YŠJй™#£'31€2O?¾[>ØwÛÆZcïŸwÈpãÎ#§ñ‚h ú ]Ç ŠÎÿèOîûέ¥9Cs¶(b0§1(‚`¹¢@E¡>zÑv'5ª6¡¿”hëdÂôÏéWZÐïî¹þõ«_ƒýêF€¡ô~ò 8 :äéHDo+ˆÅïvɼ¼Ž9§-n<Ñ ¥Z@ P™U&dëļF .¯7(àmÛ¸Eé£Ëžs|¼£JMo…³»û”KYžÐ{܇ï(pI•[n-˜˜œT¸àN%eÒèîîjGоŸ¦ ¦–ŠU‘1.—ËM2…[èBx—‹}uýI’‰r¹ÌšÍæ|Îù‡9ç‹Ì3Òa].—ßð¬˜L£^·¦œL™qÒ—þâܾzëÇ€ÕØöúXÓ:AZPJíŽæ:#SÛ÷«–~ü››`ŒÂüÁ!ß7«W­‡­{wÂü+އm¯? €9ÍZºà“LÀK cä"*?è%! 
(éõ˜FÒ0wêÂ@¨Iæ”|åe}KgŸ[ŸøÏ'£m›¶Ô&7ïïÆf¸±ù4,«MÑÀýlUNSx=cl"Š¢w\´»äütwwgÞxϬ± ‡A`î“ôÿDgï±´ÐdÎHÉà)ÑëÅ’Ë <ÁÜÌ6[ZÔÎ\zlËãBˆ_ !ÖHG("Jtí“0K›¼T*ý¥ÍÕÉU’™?—1ÖT[˜5Žç2`§=G^ÏçœN¶†áâ0 Ï2ã2þjü7LÄÛ9¨ü>òœ.ÆŒnTOâd10SÙºŽk2W¬ënƘôÜ•ÄÉFy¿T .N¸)퟇¦ì“«¹*˜'§ôƒ»Î>š$âyð_% JÏëúP¹\þ0’ìaù9Š¢+à ø„RH]~–»ÉA«Ú°4?ãÖ0 ›tL*§'­œ¿OÍÌ3O¯¼/ý1a^ÀÌQÄüN’äûÆÁ-Ç!éLr!ÄjU€Tù53ÉÏ^±XM„ø2€A+Ž>X­T>!@ZñOëfÆXÖÌB׋:V&˜H2öH¤|¶ñUˆl›Á•„¹Z©È…ö‘(Š®S޽ʥ?#I’ã't?çaÀƒs2ßÝŒšÁ€6µëaš1ñ Dx;‰n’¥µÚ)Aü¦Õl©ÓLcý4)°÷ê‰I3YÚW(WØx„&-C§"óÿ)¥áÂ\Š tvuÞ ÛQ~±RÉkT.—0É‚d3j”[ úš‘Éøžå”Ð8ìݽûêá#Ͼ洤Ì+@§ž–¢†‚rz’€tÔ"ÀP"÷4Þ¿žîMÝPý1‡F!æ "PI:½Óš µËšëÎÈü2rSÒc§V‚C;4YÿÀj6oÏâxÇ,Qâ6+­³VèÉ’[k¥¶3$wg×’ÚÚ«ç%ËN9¯¹ñÃwG×ñ#Î>¦5:g´¹ù¿í”.»}{÷@JÐ?2[J¤Î™3ôKÎØ#@5âXÚùK/¶6 ’R”1ð´â¶ãØs¯‚³F]§bÌk×ÌDüÍÕÈ~OŸOáî`Œu+tQÛá·&»“Z8—Òñ()ݳñ©äg}ºè"-¶­V¦áÞl$r’{Bi’àË©_Å@*„hdNèl“Íd ÿ¥d.\lüZB™¸(+†d¾0/_Ç¢œ:³YšMÌ8ëCÄmãZ³0§na ú,´is#·Œ±Ã°¼)E¿§s ­Ù––J;Kê\²¬z¤¼Æ\•«„V“ ù@5¦ &3%åQÊ*:¨‚¬:ׯ7ã³ý]#àæÒS7*ÒújïW‰bP«–^Å.m æ’`íUÈdzzz –@Ô'ë5ø—åïº*%åæ‚]ÍQXÙØ¯NÒUõOœYR!í ŒtéÕß)Ñ!¢ÔXP F¨ü]š†­ë%‹ÔLJ8Œ"ìºzYÞ÷t8ð‹]|Çë†àª ´0õÝ$¡&£–jav‹àa°æøjü×EõCÞ¿×ß½²S¦ZuVºX­o–˜{Ò¢úä–½åí+ž*õŽ Ê2¯‰Q<žƒÒHGC ~’ŸLäEfÔ°…T΢Ff}?¯§.Ó[núºßÉSM„Ósþ"¹XºœÈOÚÈ›c/¤Ì Sím©$ÊL Ðó$Äíò«tzÊr…Ñ Î¾Ã›v”BæœIý&‡œó^û—e]/E:Ca³§ÍxÎG»’ Ùãæ9¿Nz»d~+•B½m@h‡'4MÒ¡ñµpλe!3Ý÷ßKó¦¶3¤;À•@ש ûhª€º/ÓZ­šÔ$dY’+Ù´þìYõµ7Ó…GC"téÈàS`è„ê?S““wpD\g©‡©¿ÅÑt|jehs2™É©©Lrè¦$<ÒÙ;¶kô¶.t-«ò©d ~7µ9Ä0¦’攣µ‘O­¾fZŸW uYZ“kMÇ0…¤ÉvròLÍ^•ˆ§ªÞ¥ÌCFú¢Ö,ÁòQ—œÔ\õÓûÃå Ïi¬:§«c0 Y¬ >° 3'¨>¤¹Œ³£v•ãZå˧;¯ÛÕz:K»ìË_vNsÑÇ•Ç4ž7}3®ï™˽µ³…É+º¤4 3)c&ÏÏŸALmV/ÌÊFU©Á2oD'¯©²žvcœ;!Ü.«2/㜯´~áÖñµù¾†dí¹Z@F]®3Ø2‡’(ÉŒ¦e?S3Œ3ˆ“d ¸+à<̇èg Çã8^)„øJ¥’¤ª´0ÚQä›–z±ÝÄÛFëVh¿·ò #ânÎùmaÜ-O甉®Ü7eè0¼ò .#V·´#c^dŠDç°@C0N_Åð4ÌaV”'ÈF„ –Ô´Ÿàº_Ôc×Ñ´0ÍæN}›²D‡¡+¬‡D30rΛé¦ÚH%ÓÙ2ŽÙN ‰PóYm–Ë\2ZV˜ieš¤ƒïï!â³ s`„§ŠíÈHe«ý‚çGt¥ªb+)IŠîg!ìŒÁ¾ tȤêfF|š±J§\)TÞw·xSNõ½/Žã—º¡Wõ÷¯›­æ…Ah Æl„”ÀGú[ÌdiIø‡‰>Ñ>rËÓùA!b?cì"VíO)MÉP÷_9 3×H‚–ÌE.œèCîÔ{m}0? 
8[VH²1e³ÖQSß:b“åÍÀàÚ ½»Ã3šGÛÎP|œÐ´ÙíT/T¹)Ùîx©í•ËåÍÚg!áÞ”F3…N $•‹$LÀ9)™©ˆ¿Ÿª‰ÆYÝjµ¦$ޤ«‚™°3MSÐັƒLrÞY0¤H ¼T*ÈpœÁÀ8{çü€ÍX.<Ô?5oM°¨è*Üx³mÛ6õoû¶íòøXذnýÒø»¿át¬ò’2“äæÇ§[Ó0ÈCÅ¥š‡Š¯ë|®µ£›tÿqà~®E8Ó5}›y’ií/#ì cèßL-¹ascå¿ý¶³ Xš÷ü#â #¼$w½I × Tša1©ûÀ-ãJcK#>è.Éò¬ÔW•†?“ô÷p2‰šIß1== ÓÓuHÿšSÊw5>9©¼îåJY1ÂÕLÇü+—äq¶áÏLòVf÷+œo.Öd¹ßå:ºr²W¶­À®7”aŸ¡jºt&J‚Uùòs «}Á>ºtnÍÙÀØqŠ…€Xç|ÐHšIŸ—Im2Äj˜ ¼ÛŒÕ¹>’a³|Õ»h•ÕB–ÌV.P¹`kÕª=ˆæÂHF:ß“¯¶Ñ0zÑÅŒßÆ13ìÇ)cIǤû°ä˜ÏI‚ÃÒcùaDcŒÁjÚ˜^¨WÈ”r)4Q¸T (†§Èv¥æóÇå|Ù‹dÌm:ÝfVœ—åŒËŒeŒ“³û˜ugfçÁ7%óÐ97ê^©TNÀ"ÕºU´P®íg«ÉÌš5Ë!Ê­[¶þÓ²KN…Îe°¶«µmî…N™`/¦ŽÕÀúÇR_‹WFŒÌnê4Ÿ%ó¹Tô #©kÌ&Ô‡¶ÉvÇJ‘Äbý¸›Çë¾ym]Ò´5"ö®Ü^:öÖj}ÅE}åÑ”gÇ,- Aö 9Q&ÈêÓ¨ÉÙ¯âr ÞXu5Á,ŠÜS€ÍJµúß)p_äY“011áÞÈÛÈä>x0W”˕"y+"Þ&“ßd bø<<Ä„oÝ>SMF5ÏYn1[†%\sÀÀ*kád5Ô¡sw†a¸Ô¾g¢*aø;ƘÌ6¾wCª9HtvÉLÆØ cl4·‚0ø¡©WÌ8¿@…G!zÆØ… ñ'y>¦|ú¨ ™!ÍÙ(£]‡'qüs^.o‘LZj3ÜK:L] ¦}ޤµ™?è Xˆ¡0 7JIÊ RËXî ¾Ñuž§šI³Ùü¡ÌvÖš ÔjÕ[8ŽòÒd–ì¿°ke]st0¾Î÷ý5² ‚LC0‚ÉÚ²ZM—иædL4íÂä¿Ød–ßÄ„üýæý¥°Ô'0Ñ}ÉXâ—‘ü„1öYìqH“ï%Ú«j¾ÈjsœóÅ­VëGÒÐÈ#³ “¡ˆ™Ú;~BoW÷%C/?NTy'¯‹&Ü?¹IÝëdò; ¸E #I‘Ãì b­ÙpÍ€b^k'²Ú™ÊØÝ^Š`Éf1±ÿúGøc[wHBÄ‹.xáôâÅ‹;ÇF+7þøÆŽ¶v³ã”©•o AÈy’ÅÄXÚ&Z 2Ó´¸iZ=Öè䤋I‹£kò¿^ŰüšÝ»wO åªl5Š#èéí…ŽZMGAihŸ×»<¾) ¹ÐM¢“ê6pö@|Ž™?«Õq5˜CîÕ""å"‘Çd!¼–@u~Š™ë°Tú0gì%­CC!ºák:Ÿe\16Î$ÍÔPj}Ú<`Œ—ËåMGãá‹ìz´ª}ê½ 8ÎBÈfS²±Š:ÁQUYl4D£Ùü*Í}êФ§`R¿@sqîùÖ’™ Z^'Eè­­Vk4Í?AÉø»xãG³dgœÄ÷íÝ»o'!t™ÜÞÕÕ¥6‡zp\ˆ0¹¡U*Zƒ)óAb:Z*øX¬ïŒüN˜¶{&¶ñ!º%XWw·|>áAðŸÅ[fÌ2†v…@¼‚q&óp’VKf¶a™qA³öšÔ†¿—C~ÛdUý™².§ÿýûÿ.¾ëA?#ýIDATËB¶oz\Ï¥›v$ðö‘8 …v0“\Ýbp“€¾ioØ ôôÃîÇ·òñÇžPJN_gïw;†z^#ý;µZÛ(ª|6,•`ïÞ½°g߾⇼«··÷²®®®_%q¼˜š,™SÎ9¥à^ÎùŒ±9I’\é„zåÖ0w3ÛP¶„í‚c†„Äíþ‰PŸžv«T«Ÿœ5«#‰ã1K˜±f §s!¢ÌŽ%„ØIüÓq,žÇ9ÿ¹ôdºsæœEjªa*Í$ɰ¤ƒ1VµîKb6èÃð¬C“F¨Tq'/ℽfw½Óê‹Èìt#†¡Rç¿MÀdOMNN½q|||SºÝÂJ£Ù¸¦V«öF"Îj›°,O<&«Lr¦Ì­ûc—’¼ÍxÔ×a¦Sˆy; ˆvb»J?°„ªe­^˜mP)q,“ý‚ Ø]©TÎEÄïÀéà]Ìa*Eì2áp¨ÿ޹ ™ŒJuæ Æ“Ÿ9õŠó‚¾y}bO4ÎhŽÁ²_ì‚Ç¿?,¼÷pØñÞ#a¬‹Áœˆ«$¹(zRÊi5l%¥KFaÖ´kU—kuÄh5ªò,LªTæÖ¼ÏoOîÚÝñ®¿¹N2˜š†W¼ãškÿüµ¯I‡-_²dIyÞìÙqÃdÛC+#ûç‹mýz·R´¯'ÆÔ9-RL‰ù¿Ø?¾sgçðððo“ZùŽNÞ{JE?L’äÛ' üÃË‹®ƒ”{(˜¬'’$9V ~™38QU|ƒ 
DĵŒ±G_Ó¦‚Üi»Ö•Ö8áôŠðGdö`×ÃYÓ˜X¨¤—Ïd‚´Ÿ#ÂÏðpRJÐL-btkà^DU`i…<­³£c†#Ýc#·Œ´¢ñŽŽŽs8çÒ{ÚnŽÅš„uz“Ÿ¬t÷KÙf%Ý+dÆ·×*'ZÌž,/‰ð±4õê!Y%UG–Íž°'AgÓ–¤‡xO£Ñ86àüJÎùódÉé–„^©ådym)P!]Æ6 !naŒ}@¡vѣȧI‡-"¾oäœËÖGÊöK€jarB$›c?B|Ú8Õ‹+=èDœ—ÝRm­403žÐŠÜ ¹‰Öº•0–¡¢à—Q± Î`Œ]›ÄÉ+ç ƒ!‡¡ea¼q`°›H\Ü$µ®¶4_ôãü‘hN7Ùä¾Óç|ãêê¬EÃpßÔJذw†Þó0LÃ$Öaé N€µoŽ$„bÆZÜ¡I%„Œ±”´é‚šc0–j=LšN„ÇþpïÄŠü¾÷”“OnÜÿÀœŠâSN>üãKŸüØÇZïûàÕ¨ôÝ$/Í+KG~òöIJRÐ… ¡¥,Ìn7ØWŽaÖ$kˆ¿¼»* ûÕú;åQ%uµÉ.ŠT4Åx;bP- sOL¨î3¹dßZU•#–þœ@–I ‚`aº³‚5[MYL< )Ê>‘ËçðaÈ…nÁ5%ì¥Mr”ÅŠ’$¶ÏÉ”øþþ~eúµ¤d UdP±˜ÜuÄqËf›q¯<›ˆû‡±k'©üM:M»ºÒR2'³b¥™†áB΃2çŒEQ$³d9ËíÚï£ç Á:’ Æb#kd£ž‰ÔX@|\èñ›Å¤LšÙÆFhÉn–e=D”‡¾ „aX– †QíãøÉ ÷¡ÞO415 “¾Ó__ÒW×ÝÙ™‚Áy  !)çç|Bj€ˆ¸nzº^¯Êêxš ÒŸäišà™NΔþ“.¢%• »?*H‹¦MMNÉ4‚JðùÍfkĤ H3>*·œO˜y’¸Û¹sg!Ì…šÌT½ûöí½îÄ—œ]í_4ÇZ{؆hŽ~*†'FÇ hÊÓxú7+áÐ+æÁÓó茥©0~„$§ŸkS 2Lb4‹t£¶4ŒAp{y Ëÿؘ\ñƒßË~äÃF%6ÖŽH÷Â… ÍÙnR5•ÖWLË–KÝšZC•vÖ^•ÑÐw¶•žœ‡ZoÏ;vì¨Ër ½==ª”(´ã¾ä²Su°‹.Í„I1`™ÂÖÊ~ç™SË”…È(%ëÜ…!»ÑÖ7aLwºŽŽÉ8˜™IÒ‚ÝŒ±Ýfƒõ›œ ò~!޲K’€Œl2žüŒóÕ†ˆ‰³Òä9Žj²¢^ÉÂÊÖa¶?΀i³Å€í(TDMÁJCø+¾ˆ©šv¥…Äp½­Ý+q@ΆrÆCðH“/sa0ù[ù³¦2&EI*€rÞ§Ñ´& l@!684eÝù=zEW!»ŒëMy¬Ú‡F^r¢Š0®–Iwr «ÆU¨½c£°wr?ô/šc] ÷H1ëÔQCª¸gxL•šéÈ!í"8ôéxbË?Þ«*`ýÅË^:uáÅ—˜ºè³×_?~éÅÇ=òˆŠDΞmšwÝ}Wê"I£`jž•ôæË’ÙÆÀ<澆XsëÃAÿàìëÆ Xš‰}ffÐÿ„Ëhósíô§Dà>–9#Ûô‡¹÷ò¿çÓô²Ýιþm¾ ]™äà5ˆuk$a,ç'°š P<ÓLTÔ±MÑà;t‹ëªhÆÜ…èF÷À™ï”/’σOŒð·_“»‚ßÖW×®«¢€Ã3¼l®N.ÉŽøàÈyTù+Ëhnwj2õ©—Ÿybo÷p?l˜Þ [“†Z¥ã+Ó“Cžu ”é‚úsû¡Þ@o‹CÌtc<Íæ¥¾l¨÷*¥^îì¼å}¡ØçÒ5­©ÑO>Ž‹éêâ#Ÿþ÷oÇp†_ÿò—›ïzï{%K ‡fׯºòÊæÙÏ~‡iþ×·Ü®¶)Õ‡JjàÒØ›4ËÌL‡]¥mŠOþ¾ZT6 ïíéîÖ“"9öŽ ÐÿõËDg|zqHý„Yº¾%¦,˜e«€4–Ñ(]b“°–µ ‚cû•h$±Ó¾«ÉPŸÏÄòäçj\nå8bX£›å3{9ò˜‚ו©£âìÍ€,JFaÉ2Ö\mY¬™·ögOÓÎi Âó{‡F‰V–µ1Ct|ÅRCc..¨B@kØ€¥´ÍU\ê¡ÒùÆ…¯: öF»àþ©På¡ïŒùÐ{Æ<þ¢>0û¡&W{•BÈ©6‘’eѤØúÒê6“2N*OT§pzñÍ{põwï“MùôÓO›úÞw¿Ëj2åSª0­fýCÿ¸:Mñƒï{ßÔ»ÞûÖÛ×ßeÁï¾ö¯aë®í°üŧÔW̓jEÖ—ÑÎç4R-£Utú§«MES00gè"iKAhdÂyÑDNOÔAΜÉMõA“¹+·MÞGö, ÅÚð'84—sØÙ0d6.òHnŒÅð2NÎIå7 ËBÁvÇ´sî7a´Lspïm †å¯3àß-cC·¼× 3«5@®\©›O#´/è z,º³KÒ?ˆ•{Ð~Ì0X!©&Ѧ$ôÌfþ ·ÕóE¶ÏÌDÖ…Lfþq‡ŸY9¢~2µFµÖÈês+/ïSß{“TYT#Í}ºCI‡Œ™ÉÜÕ.ýXC±K®x®öEÃ{ Øa¼j´öÆÂÕM¬ õ â«_ÿÊÉÏ~þ‹eÝDôLjß÷¾÷±Ñ;+W½ìeSûä'«&µF2˜/]ÿ9ü§/}zÆÒéU¯ªò°OVã“»ÀÍ9M•X¦7¯‚mÚ±æÏ»FÄÉÚž®n×iFÕל“?Òb÷YXL9ÿ@»û¹žé3$q < 
öÅ}Émýu¦µª:kÜs2F¯v£yøæ:KÌ7y˜=0®ÍU€ï6ÇÀYX³þÁÃ]8¦¢×#šÌÁçÝžå8Š ÿµbæmRÊ;ÄŒ¯Ð‚ÎÿHýL¬¨o›œä#Õ»ŠëÉì:0þÔäV•à?›…iN¯<7)T¢]¤ò‹Ó-tCc3§®Ð%ïÉ‚à<‘ÔÓZòx«ß±7ܱ²²=õ •æõ Á®{Cãï?ú‘Rµ\Q^žÛn¹eêÓ×_¿¹óNÉ'ÂÅ‹Où«_e”Áüõ›ßŽ_øÆ—ÅÈG7Ÿ¾faXPšÓL Ó~™o‚5}tn\õduxd䟒8þše.Äy•“á¾7ž|"Ŭg$𢫽ã‘`FÄÊ\¼¹Bf:wå"2ta‘¤¬"Æé:Þ %æ…9G é—`@ޏÝÓ!]‰˜U¿{V†j…"0¢©¢{ŸšGí"932A:xÊÈvLÖó¨6…QDÿ9ôþ‚K7Åþ/ ²ÅYÁ³Ä|}ÖæÒæ›6uÝ8|¼x AÎ9Ýd;¢+Ú ’ÉeÒ©)™i* •‘謭)DóøŸ××ÜühuÝè¸\ûüй‡°3žwZô¯¸*¼üŠ—p“85q`ýW½oºå£±D½ð…_ýú×ÂY³jº«øôeÏáXûpkàm§%ÛάÁœ–Ñ`R­Jú`‚i¨h%lZ¿©6{öì—Ë¥w—d3³u_#Žérˆ-Ž,ž<¦YŒ5Bس\ ž¹“k<³rsî¶Áì³Ïªos2aµ!n×·€Î{44kfô‚ØR Øf™±pݼ™Ü¶€~éG{fròÛ‡(jÚð¯ ß`M]§)2  €öÍ©«É”îÜ{ûÆWÂeƒ¸»”@‹«[DPiÌy‡²ò]¬M$¹u2=¯¢¥ŸßÜzôž]ó{æ&¯Ï_Á9盼àüº>55Ü~˯ê?þÉOÂÿüñƒ¨ÙäÇ}ttÁùçGgŸ}¶¸ôòË+Ú|?ø÷ïàµo|;î¬7*_:Ÿí]PéêC(¡LDuc°?ˆáЭzüùÕÁ–Í[ËsæÎýy’$/56µÉø4„Í™RµÉWÅsæüòì4óZ¡Ÿ‰C©Þž¼->#Ð Û̾çÚ¶R<ƒ©ôÇ èšI™ªæ€A¤fëè>WÔwQt,çñ3ß)Ó«)+Úí=ëvž¨Ýe YõB·ÁL»ò5\ðÆæÎ5 —bFL5Q*MÄ+%Ó€™=¦ÆeFþUÈdº~¹sÛ8öWû×ö$²(€Ìè-kç®É=¡Û(ýWèÂQÓª8FG}y{sõ=+º_pöÙïÿ׊¡¡¹5ýVü¶·¼%ºõ7¿a£ccÁäÄ„Ô\ø²¥K£¿¹öÚäÍo}+Ó©-ÚÄ/9ÿEü§¿¹9.=wQï^6Ês¢’ æ7¡¦âW‘ªä¨ßO6¶~ñÁÚx4ŇG†¿ŸÄÉ«ì‰+ÒóˆÔYÑAAâÝ3a?;mâ -G©ö䟚1´ (°‚¯¹ÄéÛäms3|U.§…–ëö­.7¿‚:y #R#Y°ÀÖ!µp=C´ÓrŸhü f$B–’nÌA}ì6’“óíÏà ÊuRt?›˜B ³½möñ“ÌîùɈ:¢•3CYö ýj\v‚(sæ™hó‰´2™j­¶³¯ÚsÓßøý% N¾0Ú<7 äb–~˜³„:Ô{‚TyM=Ge=©û'ü®Ñ|䶇;N<îøÆíw©<rŽN9餸Á‡RÙþr'è²¥Køçßúü 7Èçjœä;ßú¾ÿ¯Þ [ëcÍCÿêŒäé ¤ÙÄ‚Áf‚§€UuNv$jõƒ±æºÿº¿[ž:0odøëI”¼5Ít.ÕQ§œƒwfor~žÿš ä'Êþµ¡Ì윫â,N@3ž€³·‹'‰å1ç!Ë$ ‡ëgpV° òï™qå³RÍæÊ€ŸÉU É8!ZA/áÚÖ/TØ7mˆÕèÀl+kG9̹gÚuZ\eÔ6èhÀT+׌ÍÌÞŒ&-/F½ñ–äÌd/.õ$P›ÕõªýÛÆ÷7¬«ÂÇ—¶FKqyNªd<®,BÅt¨¬x5Àî²€ž)h®üç{$awÞ}‡äEªÌØè¨¸ð ›<úhø®ë®«_þâ'#óçóE‡Vó˜ ÔëSÉ[ßp5þÇ€lNW´à+ãÓýA—,&Î…ŒZ Uó7$S‹`ÖnQïÿúÆdõƒkººú ÚÛñѨ}ˆe¬$É)“³ ½ÃôI%xÊP͓̽hsÙE˜EoÒòˆù"JíüB®ºBmùƒuÎÈ‘§®Énü@t×¶ZH¾  \öw‚4Ë8ÉWµÌ7æôÛf§6øÚ æ´‹Ï¢ç¼6)H6/ü¼Ïvþ ² RüùL‹ü9mR‚2ºÍÉÃhX·þ˜h* 7:Z°åͬØÄ&W!“Q»?“äÀÜys¯Ý¸êÉ/.»¡ ×¼c~4ZŠK’Ñ$ÚN2;­ËÚ„²e„ó¶[¬n´ªïß{¦{zû•³oÏ^|ÙK_:Ù×Û+Ï‚ášñ }믢×_õ†pÇþQ\|ѱͧ޲ ¼ <»¦')r„’t,—“Ô<úíT}Ç×7Lèž7O&Ö½ºÕl}/ éð R-B}„“–ýë8êžÍE¦“ÅŒ5竳®ôv>™$•¦N–©“¿B¡Rß#Ø"#·ˆ}éçgé!Ó×ÃVÍ :»Î:EZF¢*š!Â~–àáO´êͯ¯ƒ'6<Ý=Ø? 
Jµò·¢VôvÆØ”ÜÐǤC·-V]I[E*0HœûÙÏœÈsN9ÿ¾·çGùï:WÎáë·7[Å$/Í 2…2†ìu—KÓ Î1—|éìà”¹ª7Ò§2 Ëßfà\HAÌÞËá ÍBÈ¡ ½Pº÷€½šÉìÈîgõù@žl¬úئA酪Â(`N¨¸•ö¬¿Í‡Õü)À Ît.Y{&“¥…kÿ›æÎ›»vûúMŸäÿ¸©kéù'@¸¼·5½°&öÍ ™ TÏÛ%å Óáäï¶wl]÷ŒÌ}{÷à _ùJÇE]4}Ñ¥—Vg`0xÇí·%qáËøîä@²üÊSâU¯’°UfGÊÉii“æÝ`tÔ-û[Û¾õXí@s‚ÏNâVô®¨}Áú_¼tn«òùDéÚvç‰%>ï®tXú³¸¬ÿÅG‚=â#c(Åábo‚Ÿ©"…®Ä²˜É IsĬrlñ,‚ÁH‰ÕöyE… ø–ˆãZ*\ þX½Æ‹`‘)͵SŒŠj]ù=m]›‹jpºQÇÁʹ9…4»mç£ r²Ú)¤ÖUõ ƒÔ IáG{D-yÀ¤»¸‹KS@{&ãB®ÏúÌìáy7FõÖ?¬¿í‘WòÛ Ü7k:{;ccã°êtBfÍûÍ(Š¿Ô××Û5==õžË¯¼òE¿½óÎøÔ3Ôé–4¨ÑúÛk®e_ùÆ¿È1´Žûðùâ±ã«5`h¦Û˜ ‰ì €IcÉ7·%«ïz¬³'ì”Ñ£›[¦¬¥ºŽÓä.Oqí{ý©À3Ó¢È>TámÑ"§£‚›ñê3cN’¶ÓÈ6ZU$X½;Š ê¸5 }Ôôhë&ŒuÊ€¹“ÓÆ¼ZíˆÞbk«ýͰ֙³¶½Ð¹×(9tß”Ùtú›ˆù¬$ ÁoÑœˆ]»±Òdíð„kûÍ“t·w¶éѧ‚œv–µ{0ø L&ÏÑ“8y8¼¶pð(Ä1çMnÛ7GšÂŽòþ¹sæÞÅ8ûrÔj=©òiÂ!‡rù–-[¾qÙ‹¯xã?~ô#W¾ö5PíèÄõkV‡wÜñqýG?Ã7mÁáÓÿ_{WïGÅßìÝíùîì!kËÈe"%-ˆ‰‰6¢ˆèh(ùRa$ DCB*$DE”@> g¤ H;²dÇwñ÷víÇ̼÷æãl *äÏÛÝùØ÷ñ{oÞ{sîp㣗¢Û {gÒ†qFIÿ´Ë“ós¿Œ÷×~ï®?ڀ䅳;ПŒž©B×Ä­Nü–âÐT¬Ñ÷¦ôãì2à8ƒÐ}<Ìž~FÛv§è^kΘðE@hÒ«ÙZ´ÖÓá\³¡fJe:`S€ûYl ¥$¿ó-ZPÉ 7kÙ-“²ô»`JiÆ;-)þD¹czxR#å<0g“9ó€KpX~“εrP)<å«4Hƹ4dFÙy— l!ƒ«m¡GWg°ü•¦éå^·{¹ÕkëÊZ v©ûŠ*[EŸ8Ž/noo½øþ¥Ko~våÊA§ÛÉ 4wöžŽœô§oLï•èE4žŸ4Ë­ébÇjÜØÒbÛjüòÚ¦X¿~³Û†&$KK_æYöde)Dºèzøˆyö‘HLÓíNFà„ÑÜ+trx•ɰÜù$N†_ïé×ßIHÚ šN1#Òë°=îB…|¼$TÅœÈ\sž>g£Ä5€ 0wkçãÅd¦î¯~F%0ÝQÌ._‚» GúwÎÑÌ|U ³&Q ø_ÂzŒwýpð$‰ÇBèʺÅÛª6—Ð6žÖrõ³TÁ%=WõñßE¡mqxI’¼•KùðÎÝ»«õ-“óï¼–Þ]jÞëÀ|?‹ GåNÕn™lP¥$\øq(Ÿ|óÇÜýGp¦úûV¯ýñ4ÞPgJÚ'vUíTSU'¯Ýe‘Êé\ièã$Hj$c½P÷s\šr&¯rÖn€ŽiÁcÄ™®uõñ¼vTõVÓoÇãûMðŸ=O…šºVu§™]ÂQ±JððضÔùìAbf'=9Ï=³ÐB2x®>÷œž§ƒ€ÑaÄ(p'»½úÞ«°ùú©|°Í›úsYöŠ˜¤Š–é+? 
ãñÏ[÷o   Íï––—?ϦÓoñ"ððôY\Õ(Y4½FªzäHB8âsÖ:rñe¿zжö­wœBÊ™@iï8pÏŽÆQ!Ú^ži"àìeP9nU6÷  ª,äj̯*ôóf‚Ë#`dv•FG8'ŒR$I+°ßGh-ÀI–H1zZ0ö‡ èÍõ'¡4êèÇh'æJ<@§€GBú¿dasÛÛi+Ï€ñeraž—Ç»–±Gåù>w¦ãô"LòµÝtÊ€›"ïHf°¼•Áâà 46'bôëvüÛ­õÝàúüBÿëî©ÞÕâ(¢ÀuQä¸r<:Ìnø’±2߀NÕWÂÃ&…:ì²J¼ðì¹#6QS.Ïï¡z²:Ä{á”*“üjÛÔ³'F@11Cå®Vˆ B' ˆØ &‘ÔÇ„žèýѾ@kÉ£ØI>“‹sM…¼KˆmC4jFú}¬sÛ–¯JÖûF.‡*3e°PõçÒœ±nP)Ÿ¡DH«RœÜ_È—BÑo½&r ¡yÿ6±É¬Øn‡ ®ŠjkqÔB!Fá^"ve!yî‹ÁW?%ý«‹.¬ž>ŒûF:šdã!üù÷ÖÁ²&Àµ•••µ8Ž?ÝÛ+ÏZŽZ‚$?é ͈B+‚ L à –²§K ’ÚÚ³Àݸ6@g„jÃù3 í°®™€ÅˆLÌ‘¬[Käx¸ñ»¨¹ ´C–E%|šël3ˆ€Á  !€Tíyy–@3Œ9ÀNOK*³uãÌÂuafuÖ eÝÄÌ8ÁQ"Ÿ¯%¬xw’Ð<`%+)r!>•$ŒRJl~q›y8¾æ¤´“vÒþÿÿ#²d¸QIGrIEND®B`‚pontos-25.3.2/docs/_static/logo.svg000066400000000000000000000507151476255566300172170ustar00rootroot00000000000000 pontos-25.3.2/docs/api.md000066400000000000000000000003151476255566300151720ustar00rootroot00000000000000# API ```{toctree} :maxdepth: 1 pontos/changelog pontos/cpe pontos/errors pontos/git pontos/github pontos/helper pontos/models pontos/nvd pontos/release pontos/terminal pontos/testing pontos/version ``` pontos-25.3.2/docs/conf.py000066400000000000000000000157001476255566300154020ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
# # pylint: disable=invalid-name,redefined-builtin,wrong-import-position import sys from pathlib import Path source_directory = str(Path(__file__).parent.absolute()) sys.path.insert(0, source_directory) from pontos.version import __version__ # -- Project information ----------------------------------------------------- project = "pontos" copyright = "2022-2023, Greenbone AG " author = "Greenbone AG " # The full version, including alpha/beta/rc tags release = __version__ # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.githubpages", "sphinx.ext.napoleon", "sphinx.ext.intersphinx", "myst_parser", # "autodoc2", ] # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] source_suffix = { ".rst": "restructuredtext", ".md": "markdown", } # The master toctree document. 
master_doc = "index" language = "en" # -- Options for HTML output ------------------------------------------------- html_theme = "furo" html_title = project html_favicon = "favicon.png" html_css_files = ["custom.css"] html_logo = "_static/logo.svg" html_static_path = ["_static"] repo_url = "https://github.com/greenbone/pontos/" html_theme_options = { "source_repository": repo_url, "source_branch": "main", "source_directory": "src/", "light_css_variables": { "color-content-foreground": "#4C4C4C", "color-foreground-primary": "4C4C4C", "color-foreground-secondary": "#7F7F7F", "color-code-background": "#333333", "color-code-foreground": "#E5E5E5", "color-admonition-title--note": "#11AB51", "admonition-font-size": "0.9rem", "color-background-primary": "#FFFFFF", "color-background-secondary": "#F3F3F3", "color-sidebar-background": "#F3F3F3", }, "dark_css_variables": { "color-content-foreground": "#F3F3F3", "color-foreground-primary": "F3F3F3", "color-foreground-secondary": "#E5E5E5", "color-code-background": "#333333", "color-code-foreground": "#E5E5E5", "color-admonition-title--note": "#11AB51", "admonition-font-size": "0.9rem", "color-background-primary": "#171717", "color-background-secondary": "#4C4C4C", "color-sidebar-background": "#333333", }, "footer_icons": [ { "name": "GitHub", "url": repo_url, "html": """ """, "class": "", }, ], } # only shot type hints at the class and function descriptions autodoc_typehints = "description" # parameter types will only be annotated if the parameter is documented. return # type is always annotated (except if it is None). autodoc_typehints_description_target = "documented_params" # sort members by order in __all__ autodoc_member_order = "bysource" # use class docstring and __init__ together for class description autoclass_content = "both" pygments_style = "zenburn" # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. 
htmlhelp_basename = project + "-doc" # -- Options for LaTeX output ------------------------------------------------ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ( master_doc, project + ".tex", project + " Documentation", "Greenbone AG", "manual", ) ] # -- Options for manual page output ------------------------------------------ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [(master_doc, project, project + " Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ( master_doc, project, project + " Documentation", author, project, "One line description of project.", "Miscellaneous", ) ] # -- Options for Epub output ------------------------------------------------- # Bibliographic Dublin Core info. epub_title = project # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. 
epub_exclude_files = ["search.html"] # -- Extension configuration ------------------------------------------------- intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} myst_enable_extensions = ["colon_fence"] myst_heading_anchors = 3 napoleon_use_admonition_for_examples = True pontos-25.3.2/docs/development.md000066400000000000000000000012661476255566300167510ustar00rootroot00000000000000(development)= # Development **pontos** uses [poetry](https://python-poetry.org/) for its own dependency management and build process. First install poetry via pip ```shell python3 -m pip install --user poetry ``` Afterwards run ```shell poetry install ``` in the checkout directory of **pontos** (the directory containing the `pyproject.toml` file) to install all dependencies including the packages only required for development. Afterwards activate the git hooks for auto-formatting and linting via [autohooks](https://github.com/greenbone/autohooks/). ```shell poetry run autohooks activate ``` Validate the activated git hooks by running ```shell poetry run autohooks check ``` pontos-25.3.2/docs/favicon.png000066400000000000000000000501421476255566300162350ustar00rootroot00000000000000‰PNG  IHDR szzôgAMA± üa IiCCPsRGB IEC61966-2.1H‰SwX“÷>ß÷eVBØð±—l"#¬ÈY¢’a„@Å…ˆ VœHUÄ‚Õ Hˆâ (¸gAŠˆZ‹U\8îܧµ}zïííû×û¼çœçüÎyÏ€&‘æ¢j9R…<:ØOHÄɽ€Hà æËÂgÅðyx~t°?ü¯opÕ.$ÇáÿƒºP&W ‘à"ç RÈ.TÈȰS³d ”ly|B"ª ìôI>Ø©“ÜØ¢©™(G$@»`UR,À ¬@".À®€Y¶2G€½vŽX@`€™B,Ì 8CÍ L 0Ò¿à©_p…¸HÀ˕͗KÒ3¸•Ðwòðàâ!âÂl±Ba)f ä"œ—›#HçLÎ ùÑÁþ8?çæäáæfçlïôÅ¢þkðo">!ñßþ¼ŒNÏïÚ_ååÖpǰu¿k©[ÚVhßù]3Û  Z Ðzù‹y8ü@ž¡PÈ< í%b¡½0ã‹>ÿ3áoà‹~öü@þÛzðqš@™­À£ƒýqanv®RŽçËB1n÷ç#þÇ…ýŽ)Ñâ4±\,ŠñX‰¸P"MÇy¹R‘D!É•âé2ñ–ý “w ¬†OÀN¶µËlÀ~î‹XÒv@~ó-Œ ‘g42y÷“¿ù@+Í—¤ã¼è\¨”LÆD *°A Á¬ÀœÁ¼ÀaD@ $À<Bä€ ¡–ATÀ:ص° šá´Á18 çà\ëp`žÂ¼† AÈa!:ˆbŽØ"ΙŽ"aH4’€¤ éˆQ"ÅÈr¤©Bj‘]H#ò-r9\@úÛÈ 2ŠüмG1”²QÔu@¹¨ŠÆ sÑt4]€–¢kÑ´=€¶¢§ÑKèut}ŠŽc€Ñ1fŒÙa\Œ‡E`‰X&ÇcåX5V5cX7vÀžaï$‹€ì^„Âl‚GXLXC¨%ì#´ºW ƒ„1Â'"“¨O´%zùÄxb:±XF¬&î!!ž%^'_“H$É’äN !%2I IkHÛH-¤S¤>ÒiœL&ëmÉÞä²€¬ —‘·O’ûÉÃä·:ňâL ¢$R¤”J5e?奟2B™ ªQÍ©žÔªˆ:ŸZIm 
vP/S‡©4uš%Í›Cˤ-£ÕКigi÷h/étº ݃E—ЗÒkèéçéƒôw † ƒÇHb(k{§·/™L¦Ó—™ÈT0×2™g˜˜oUX*ö*|‘Ê•:•V•~•çªTUsU?Õyª T«U«^V}¦FU³Pã© Ô«Õ©U»©6®ÎRwRPÏQ_£¾_ý‚úc ²†…F †H£Tc·Æ!Æ2eñXBÖrVë,k˜Mb[²ùìLvûv/{LSCsªf¬f‘fæqÍƱàð9ÙœJÎ!Î Î{--?-±Öj­f­~­7ÚzÚ¾ÚbírííëÚïup@,õ:m:÷u º6ºQº…ºÛuÏê>Ócëyé õÊõéÝÑGõmô£õêïÖïÑ7046l18cðÌcèk˜i¸Ñð„á¨Ëhº‘Äh£ÑI£'¸&î‡gã5x>f¬ob¬4ÞeÜkVyVõV׬IÖ\ë,ëmÖWlPW› ›:›Ë¶¨­›­Äv›mßâ)Ò)õSnÚ1ìüì ìšìí9öaö%ömöÏÌÖ;t;|rtuÌvlp¼ë¤á4éĩÃéWgg¡só5¦KË—v—Sm§Š§nŸzË•åîºÒµÓõ£›»›Ü­ÙmÔÝÌ=Å}«ûM.›É]Ã=ïAôð÷XâqÌã§›§Âóç/^v^Y^û½O³œ&žÖ0mÈÛÄ[à½Ë{`:>=eúÎé>Æ>ŸzŸ‡¾¦¾"ß=¾#~Ö~™~üžû;úËýø¿áyòñN`Áå½³k™¥5»/ >B Yr“oÀòùc3Üg,šÑÊZú0Ì&LÖކÏß~o¦ùLé̶ˆàGlˆ¸i™ù})*2ª.êQ´Stqt÷,Ö¬äYûg½Žñ©Œ¹;Ûj¶rvg¬jlRlc웸€¸ª¸x‡øEñ—t$ í‰äÄØÄ=‰ãsçlš3œäšT–tc®åÜ¢¹æéÎËžwç|þü/÷„óû-G8Ï cHRMz&€„ú€èu0ê`:˜pœºQ< pHYs  šœ €IDATX…VkpUÕþÖÞû<î½¹y‘' IÄ"X±*ò®E‹XÔ2¶B§:Õ*"m­‡©¢uÚ:b‹E TlËÔÖªÕ:TÄ`Q“òV HHL äuonnÎ=gïÕ—Gx)ÚYsæìk}ë[k{QQQÎÓlîéìì\‘•ÝMDwêTpI±H ±õ\ƒ6ŒótÁD‹"‘ÈÓ}±„"AÙ™3+¯§Lëfð9¸ ú¼óe WŽÊcÉsõÿm“¶* ß8™~Z±÷¦û¾2¸R¤ÎÀ@S𑥝,„]×½ÔÊùà`B1ÉÚö ´¥ýûX :;cÌé§ÌÇ?àèßñ@GÃEMD)¿"ÓÐÈ,¶±ŒCúÆ2Øu±EÔ«_p(ðƒ³ÿ´C!% é(°a" °À0Ú€µ1Üã!sOŸÎ©BÔåmI ®Ìæ`\á ëݦgµà;úz“‰³)?…S%!°Êr¨ •‚““² mÀ @RŒ-V9\Q4T„2B"œe>Fµ6d.É5ÒQ73›_òQFÏÈ¥?Ä †ÁÄ`2`fèÞ²Ê ¾[0ã¢ûÃ×–Q[n ö¶ÅUCD,€€ˆB0(Œð’…þ›»k ó«gpVhg0“ðò\:ô¡ì9cî꫈„÷&ÚÑÓ‡º¶P6Y)2I q¬ 2éèÜÑ2Ü<ÙV{à !EÏñX:={â\“qï¢Ü)UkÔ‚± U‰p}¢‰X” K’Öt"9àJCJú½®æüÙc.ŽºÞKy8æVȆ²!my: Ò²ÀÄP®DjÊs®XnæŽ×¼c?­«…—¯ Ò §©?fÌ@Ô(‹"é²Ì» ~Í¡ »šÛÖj£=m42²3!”üj~,9käÝ“–©›†Û¿o’m¤‚±šÈAcO+DiÐG‡ŒØÈP€g BRhÁ ¨MyA¸8±½¥Ï±í÷Ç´Òy¿tt2uSåÂéÏóÕóöÆ¿@ϪíxdÞøÁÝwÁO-ÄÆx¡P( "¬€^_Ë¿î7bKiA)âØ´¸Ì(+’–°f‘«å~ºpV&Ì1ê‹bBiÎg] ˆíjÁÔœÑX¸èA„Â!@4áXšrW‚zõÒ^ÍŽ$3¹D˜áY5‡™7 R¬ §<ÕëÝÀÆÔIK¶h;#6|Ùˆ;¯[™<,g{[R*@xk ‹~üäñ䯼¼ìÏŠæ¥E²5©­?îÑzâ@¡¿ž¯` ‚-a:úùŸcrm=ø—Sm׫›^ùÈïýüÈÃʱþp&Ńg]úráwFØqd?úŒ7pvÆðög›°5^ê7áí`'üqyAºz±£Ã0ô5E ž!0—D!D ù}|ñ•™ÙŸ¶ þéÁiʱVK«ßÊ¥r+­ºpÁÄ«x‡ÑÖÛ E"`païå6à­žmø¬°Tž É”>®z)•òØu]œ €2Ý é†ÜÅ>ü>zƒÇàˆõÄ„X¬YÙY€T@A åû®å°b±Y6&ZÅžî+)¸è êâP¯G°ãH]4?;oò¤I¢lH™,,(=­]fÁOð{à54~õܪBéD\’ÿn†|©´«ýW"i~ްÚ‚çy8#Y¡(ˆ¤ E9KšGŒæH½™ò¼Z²§ß7ÿ¾eÓ&LÁ¶jÍÊš×ÍöŠ^)FçK;3DÔÚùÚˆ÷VsJ/óFÁ”dWÌø2N°8XDpÀ0>L¯¤à™@†,g~¤²ø©žëòàΕ‚<ìAn8Úܶ-=¿'ßá°ìB` á\œ¾òAðqì• „ОçýN%“ƒegê6µá°͉NìíÞŸ8¬ª9à$ĹøÉö 
é?÷ò+IEND®B`‚I,.¨ïËè¦À£ÖB”€™a(çLšÒWN-c×>›Á?n35š¾H4Ú I$ËkõPfâ_ǶÖâ¼)‡¥™H*bH:€$!0ºç® ÷<ɦ´ ÓÃTx²­e¡ýËpSúŒ#cëø EQ­x–ûp|דRøé8 ­™T%Ñ™’,LCriåÕyÃ$Úgï1&îÛ3„/Ê °(ac<›o¡áfì8>µ¨[.¿±u~kGqÚÏÙúoB£!8E¸;%Oö¥äÏQÒ)Q”Ã×=pl´kTß`n¨'7„µñ7ºÑu!Óz^çKh¤FÉc8 é°8¡5_ÎJ:§+’•‘¢ÛžÓ’OñxN¿ã‘CX þþþÿNz;dzÏB¾„F¾µÈív.HÒ[TV%5²TJJ6Û«¬¹œ: ¢7<_÷ÿ>甈§a-B[e¸áÂæ?ÃÛBãÛÛ)ù¶'¥ôÖYŸÐšËëL‹F²âЧx¬ßOp=›Æ¤añÚ2#¥ù½Zк1¼šÔ >6%4})ˆV¥·‘€¬³`+Ýžýµ•³ãeŽçÓâÞx ‹¡i¡Ëþz_A„½éyý(5S[ ¸–£«ŽêÚ;•úêêµeQ?ì_Kå]ɧŽÞ€qo/zfŒžáz"ÂRI_ -âþ‚¨oðgG²‘$xÏ›Èt:jþâ·©3÷®Ru õ_<¸Iõó,í5eCB‘„¬Qˆ +9w6Ã=HLJ<Êñ¼÷âqDXÂåüÉ ¶AE½3èšB ß8„ù‰>J ±-¯Ž¾Ô®‚£”©¨¬«¦2'4¾6´aI>ï†Dd•B܉¬q§Ø€cl]x$æÚR#B{0o NáS\Û¼ÈV©ãg¥uŒ§NܹD™‹ÛJ)¿0c"•=Ü®e…¤¯'–uóËUz9íƒ óU…RQÚÔ`®ÿ»ZU 5dŽG!n»q>;äÏï€dÊA‡·’/8,éYnÊ{³é¬éå¾7ͨU»91õ„ÖvÀ„‰ø ÄÐnªðן-R‡&tF§äÕx^„æÝ)€ºTv²¾Úñ›ñû¸jÛ_l퇬Ryv`ŽB£º`¡R˜2¥‹æ”¢³Û.Ù3A;¡Þ/_òm§[HL±í°Åà “9 …'B(ž‹ìsš² Þ.'Ù´Ê•Ï ¯<Ï àÂÞ’ $¤Ùk•@Cí÷ã[)kà…%ï«è¦ø¶ýàü‹±»-f™™°äÝüÒ% ˜jÖôª-+'Üa±Ô¦ã)EðXh³ 7Qà¨ä"¾„F4ÑÈ$!“ú'Gq,}—²ŽÜÈT—óImZW/ÖML²“¶]T6*¬mXgscójÕ5±šþö :D÷™Õ`­qƒðÅ# ˜æ0ÄÕÆáÉE0Zl’òæÆ-&àA n½ÚÔ¡iVö7o"øû©¬8uX/Aæ`åÑ-ÆVÁ€qÚÏÙ÷Q›¬;S)KN‡aîyøß÷d‡"Â=þÛC^$á~˜éó{þ8ðšÚQS*ð3mÂÓ‹ ËrÀ*i¾Y\Âý*€Ðªù7§5x7ÆÍ«Õ9RÇn] l…G7“šˆ‡#KÞJ:¦ÐªÏ[žÄTwÁ®»dƒBvÀ—Έy6Ð=îqSé¬åTØ€„Ëy†ß¡Aîí´Í„ÏX£Óéð#ž@&“i`c´(ptòs«IÙ8»’/¡%l™Å»õ‰µÏ“E/í¥¥ïQ¶Ä·;W“´m‰±#@Þ;h½aØÀ<…õ9Y¼Ïfˆopø5»––µg‰B£¼Îz}µê­•þøm¦~^ˆÓŒh"´ÿ ‘ß|ƒh\œWM+$ò¸r€ù2Ïý‚7mp\8U^[iSR›óõÆÙÏRÅEhK3ï*|u;,BfŽêó i¾MòIz¡YÏñ¥x®FtÚé^ o9Iíxš#b{-¾ñ y;þ‡q7…4¨{ðyäü†ç(¶]ºo ektÕGÔÓÞ%ï¥UjU—ÌœÍPh¶IßK¹`ÉϦpÕ²gKkmqÆ3-M¢¯æƒGãh_nލ÷ž+BhþÔY¿ÝG†1nØèŒª¶¡Î¦„FÛÕš–ÍôÒù±â^2³ ±Îî›HUŠŽ.»e/Ææ3ÅÇÌ]Ò‰Q{XIí市±ÔÑáëÌv™¡?Oµ}Ú|£¹33IuÚ0ÞB»,ýØ:¦ÍºöTžÍ­´o ãi %=“b ´yVàZ!%º—ª  ¬±Š_ïYÝŠ\’QÂ&ò(~¢…eâºÑ.t=Î.°ßÒA†)¿¾17„Zä¦ÉfØòU¶Mþ Õÿ³%ꌺ ˆ%Ùê„ök¯:y BÝ2¹—ÓAé¼®‡­e•1-•Ns†o­„ö[(ÉQû»—©gÃÁÁáÿ $㮬2E<°xâ~4ßî Á¥ 4Z/d>§tù3–Ùü¿À`â@ÆMºùü>›[ikNí0žUƒ_Z—в«åY3•0°9Oòaê¥ÖúÌŠgf´«wsR«7̾³ˆTzõÎ|>í°s”ï1kð"qGa¿¿þþÿ²àïŸÀŸ‹`ý ÿÜOB¦¤ æ®Áµ![´L§—a-š.ȳ2uÿZ´ä-sxÍçäm¥Í‹eÜÀÄZ"V“-Ad‡H ¯…UÑ/¸µ-´ZhwÊá[³%ïì¾ Êc [Û²TtõbÌÊ*üuÇ›ý,Ù“8 M§-‹Y=ùa Y«õÓøJ<ÁÕ®ÏCR;ˆY ª…; q×Õ'­›ñ€·r-èÆëþdÑ/ïìÆ¸±È [ã½5ߪJP­k¥5Èc<×ò ”Cð_òIZX’¤Û€ehô•'?ûcÖE>¿_øwÃo1*än˜^narCR9ˆw‹ì’ÊQCÅÿ:ù¶8š÷šá– 
&¿žÀ¸©"ƒ©*˜`K¹!2JLCWäÙA«yTüß’Îè²OlEÁ¤T„%SY#Y–ýØZÿ¼{>×ïèíw5OøÆ•³zSiÔÔÚwbà-#×îÓa‚¦?uZ?Þòî Ë,ÏwWeSB+«.§ÂÒº¶Wí1[d3MYÊXïí¬e凜èIíîN‚°o† úNÊ™YüzŸ#Ž^.—‘ÔÚ¡=gT?¥û5aãìÌWÞ0:°0 , FEÛ«0²Î–q´¾3†gèÆF´¦¨cBÉÞn¤rÒœsø>óX{Q‰;Ì*A4öqY‡|pè‚©öZ1¤NÈžm.²Ðå÷É÷§u?¦vÒÞARkû™Î0’k!#”ÞùLujàý·y¥XÞ×ò® ¨]0m¨ŒñýmZ¾1fÁLš@¶3åðSëÕ¥)\6qe7õÍå"V !uq¬:h!®Gô?Å3ôrÒ'BÁ[0…ØšVÊ–9õÁ¯õÎç1$IM¤™NÒ¤Þâ›Þ)À½*5‡¦3ÝB=F È‚¸vLjÉž?lf¡~s½kôZBëYi¯&ìà¢Õ‰âdÜl•òžØ íɘCÇ|sIM–ÈHjŽ¿÷§ÒͰִÞÎ÷ÔÄm¥ýj oLÅý0¡XÈC&êknô!ëÉ"×Ý´ˆŒ¶¹8zó<5õ“—)/7.’ õRâ°n^ŠÛÊyÐa@›™7úe""Bi©ì|Œí=B×O0™Ô4.Žå,ïý ²JëÚÇ:?»@h‘" ÀZõоšÀ{3zc:u¸ø,oÙ!2aŠÙ‚ß¿Ð:æ9-œ’Îé*ž8Ô¸ˆõœŸÙOüIƒ"ëë¤HØä¡ã#N²Zþû%" ¦d@¡È›«^í'â!ÃØžÐ"çtvº“¾m~ƒ‡¸v´õíÐPÁ›:¨K¨¾ô‚¸¤Ùü£MßëëØ>Ýòõ¯åŸê­±ôÏp[d†±h¤—|”&®àúXnõWÚÙÖ&ă…^¡u3)q |„ëúÞPz$”ÔbWM.çù.‘"vd„ìdpÓ ãh)›øOÒË·äL·x½jƒ_«ÒUøèô¢”DÇMtÁõ™]6q~~ ‡6GjM¤ýBì1¥V] èË'Ù7ŸÏµ…& B^ë-äs<‚vÃldëã)¸Ùk ãC±KÇ R°%Vkf;[´þtvo2sÖPòþ”ô…XʰæIT¤ö~ê^NRƒù›m•Ô»IÉSôê5¼žNs×ÞZÁß MÙ<§&¦U ÜGu`±MDÚ±n=Ú$£Fõ©= „šáÁëÇÙ®Ÿ1Ø•ÏôoJéAÉFtÖ H’‘t¤ù]ò}æã³AÉ¿A•9̉ŒÓw'È&ER²1á”ì¹Ny-d3¥/Æ>v1Å1[€º-‘Ë)c½?ÞNÛ4©ýIà@a„O»åð¯„Û|®é Z¤µëKÜoh\©_HlÖ‹£6ª»Å– ©G#+‘ ^ÞÛv¤Æb©)Ü)Ùèp2å›j‡ÖôÑršœÊíå÷…™ ú"](Sá´Ö~Ìâ5_ÁÆ527´Ïnðz&꺆» @² û:[|éuZJˆ‚mÓrùã9Û¶ÎdÐZf²¾[µ5ITíEƒ:qÉðT@<°®=ýÎ’%™¥Š(ÝlMü 7Íeɧéå|®Þ‡ÑxάZçHÿk iÔ@Œ-ÙÈrVÚ‡F³:¹*¸÷ÍŠ½Ìóºg·&D ™5_K2®ZáF­8LŪäöFâI2%ž%4ñHéyLò¸kßô¨ã[æj¸5ŽþîEŽÕ8::J‘Ì„\.O7lvöéU,”ЭÙÛÉQeþ¤™¤‡H< ‰ŒÆíäÊ:O684¯½þîÈ©‘§ûïæ”-"K­z H÷?WcTæÑØšœ3«NëïvÓJI¨•ÈJæYh „ª,nQ¾áå\›’3»Jh¶ÓÙÂÃS„vH$éÿDc8ØŸ¦ŸdË’Ñsíý襋8D&›[pò¿| ·ªæ×_;±Ö56è²53ë8èìd©ò X]+j¥®ƒÄÑö­µr‡¥åížØˆÐd¼ÏvN)¦æäÖÝÿ8H¡×:‹ÏËé€ J†Î{xxüR”ðò †73pdR¡PBë E¶’åHmb-îyé-Æ¢TÐ+³—{!sÔqùÏP©ÊUŽêBÖBŽF–ævºªƒ:ù»W¥mž['Pjz;û#Y´¥ ü!¡ëXK; $Ân\rPúµ„ꇕÖiâM![ñw;s K â~š 89à³zEÛ[õro'Æ1sÒç£öÙÕý€R®±{­Ö~§P¤!]ñܨ±Fnç˜Á]QŸG‚h«ÊâŒ"ޤ¯¯Y‡Ÿ2+í®–ï±TxƒÈˆíd,@³ºnÔýn§Ÿ[UZîÜZ!„–¼e6ôÅõE‚hËk~,±NhUZåQ;ì2‘=°– #(\üBÖ…˜•“îÇ­›Võý¨ÛžÝ;_2áZwáØþ ™‹ÝíÜd ·ÓÓJ¡pÙØ:‰óbì4ê¼ËîîÉ¢n¤¼©ÜBsK ¾’¸q&­‘öÉàB8s Ë;‘º˜“# oXÀÈ$Án'™‘ˆ„ÐNÖ²ìr¥£æê•LŽÜkwDß#`›©„æ™~1-wk¯tç Í NFö¢V«u4Tjpòv­IÝ2§Fx¶³’A{Н}œzœÅ:©–ΈÙoW÷difXS%‚ f5ªûÉ›fÕóJ° ˆ9/ÀRûŒÞJ3JãÇ|5RpMšµ¨àj%ë$Õo5ËÁª”½›oW÷ã±6›0Õã®^;t¿ÒÇ›9[ 7L¯²ªç™ÍC3&´†7Ê»g¸à8Z|Î ,²m¿½‘÷•Zc [¡T”‚†Ü»¹ßô$E¹ZC“ 
€¦QÅó˜öî76™ïŒ…ËÈbÍ@<ÀM¹Ñ"3ã¤mHY?³B(©¹®z  =WØOÙÈêé4gÚ›<k&4Äw&TᨺØâ^‚Œ}Çucõón„õÓøªä>@&ki¥-4¼I ú_Jh‘§àÁoÿ«V¡UcUò–g7$OÚ¨øºžÑž´%0$þ±qã¹ÒuòºÁg–2Y#@R(İÉ5: Tè Vòmƒ­PvKâ§§pÓµ£Ìp-ßÞPéÄ(V…G˜×Ñ%gªñ –WŸ¹ÈG<Ùì1þ7cŸá â„Sî ž7f4x;Y¿f—CüŒMšºÚžzCáîy|HM² ù Ÿëy[jîŸB“]WN|Àãú%HgÝÎ1F­P£’פ¥Oyk?âj½ƒäÂ[³§*Í+®æð}æ¾×T­x–ŠkL$$ožÍ§÷‚ÝXdr¸-†NhÜuu©›g×%5¯ÕÃñ ÛÙR†»³ ?®…Liµ½Ü‹§¿ìÁGÝVpò„ ]6=vG}\ïr\6¬+ßÍøHø¼rÓñÛÙú>£ÜOfÝ}Pµ;’×pJ®7©l–çê¡”slÀ)ŽëkïÉC¥sW"49¶už¾Þº}I)œ7³&  ÌÁî’'ÞÜJ·f‘¦Ÿó>Žëϵ÷äÀ!£äÀϗܾV<Æs-g–or´Ýõ‚òÐZ«7+†é'ãúÉöìv¦iX¿*\' kÒì,ãY¥÷ØÀ#vTi3 Œ’ƒBvpÜ—Z³H-Ð…ÔjÁûúo»$4ÐIûO µ¢dž”M³M…JÚ< ³v´žþ®çM…»v7¯ªy?ç½öx¤ TÂ1˜å¾Y– ¯Ó~–ëïµçŽ oHØ?û]ZdëE‚xØí¤¿ñÍÄClýžFÝPao—¤6·ë!Ö{ƒ’Í"5gí –ëÏ·×ä€ÊpˆŠ.È£"-w^ƒ 8ÚTà°MþEFàRHË»a[­*¶§¾ÏäÿÏ„¬-RH0‹Ô´ê+,²Cþöj¥}e$+´xìu!„±q2v{8 ¦žT9kOO’Ç{o·Û{öÏÄ“¬÷D%Mgö"• -ß^“A†ýži¡·pÞ.i¡ryªITš¢è*y=ñ¤½Þ;ÙKñGXïÍk ¦ßsBÅ[c+-Çpæ@ÂÓ•p¸¯Š¿=Ú?b÷(´êsæèîK¾ëyÏnIE­ãñœÔl“K:Ȇk?ÔétöØÕÍðføŽÔß²(xðÛéú¢û%E€n‡%ƾɻzo—|’^hq5é¸pÆì°lVóâiÎL™ÕíÑH{ \ÏÃ-5ÒÕTò†YBÜN,ßh‡ëǬ2Ešé ¨°ÂÊjbÈ„ì Ò×)Ó¸¾zõêœúúúbJ à5w¿øâ‹ý¼¬7Wíñö6¿€ÉM|b¡fòê´ ñ7Ò•AdŠÜΡöZh;Ú¨êk~SÖ=WCBh„¶–/¡¹ºº–¬_¿~#pÓCÊLÜ¿ÿ\|||1§ÞÚøÈÝíŠÔTÊ"Nõ’|Ù£ˆ>!Q.ÒEìýKì’ÐüýýÿÜ€²Â.a¾¼ÆÝuÞ0 ¡=Zÿ`ÞÚ´iÓ¶•uƒ² À--Ÿ={öqÖ÷†ÃkN…½¨\Ï÷R„d“ë®Ú£DPRÞ7h»<ÉgH:‘ÛrŽ×°[½4øåGÞØ¥ã‹øØÊ–÷FRhëlJtC,¦Årqq)9}úôÊŠxçwX³­Ò91‡ÚE}ZŠï6+gŽ/Bù†Æ^åÒH]Ú™æ7Ä=6ˆ×üN7,°mû¸Wãwñ鈌ŒFw@è -À0…ó {]!<çßÑgÐÞLâ¡›åEI¾Gw´U-…—âˆëSÃöœ¢¢¢Î@ ­ØÄZMXXØk÷„’v 6wÿʬ„y œî8iU„•g#À‡`½ KŽlögKÔ’ÃT:z=äê\7š¹0ΛŸëÐÉ ‰­µíëž—A½U:Z—Ê ;yÜÔìæÑ£GómÑè®uÛÙæ¬´¾AÛùÄŀДM• pNŸ…û¹qø>øoßÁz]M€kàdømÝåóa¬ª¶]s¦Q¦Ãònа@o¬¸¢ÙH46ÕC˪„V£}\=–§NÚhNÉÆÌ™3ÏðÙðsÛ¡¤cOMMM¼¬Òà2•µµµE¤Ü#==ý8£zGû³ÒnÛ} ßŒ2Ž·Z(qø»U¤mÛÀ–íÔ¬ìÏü ” ø›V6$‰Æ–VBœ7gíÕW_]onZHHH×û|óÍ7kô‡Rk9íD÷6¤Ž 1e®XZ5IÜ!;™¨ ÿw¸wZÄP>z…ÍJ ^7ŽÙ¥W®š"HI>HE±EÇÀô.9LÍÎOæOøû_…8Úms è!×swrrº ?Z"äº ´ÃI¤¤]l3O˜´ÎqoF ;™n¥ i!ããZ•žË®—¦f±Òd“"M‹$ø 騦À–K­ä€rÍµÒ w“뙃<Ñf¡×ÍÎÎÞÝ–Ü™$»›•c¼‹Ìd^Ö³E\%â½A¬ª¶q9ÓÙ•"ÜÍ øJ>NGò±Þ<ÎR…£Š³z}ܸqéçµË\ïõÓO?­z]ȦÒþ²gÄ_„+y;ù,[_-;àXþ™ÉôbÛ®€·kMzî\V+ÍoÍH涨»~¨9¤&Ö È:«AìšÃuÿ!^U ¢ŽÇ-Aj7nÜ8Åõ~GŽY'ð²uØ¢µIS¸¸‹œ³ë•ÎÚ,÷£~72“yVÚ7ÍojØý/²Z¨pHëÍ&l~Ë ´T!YÁåy®Ój>÷ÿ£>Zk)…³gÏrÎ3())Ù!°¾ˆ)(v™oyÿ¶R#ÚYÉ 
€Ü²nâ£'¢Üuµi[æÖ±‘šËϱ?´.žæ×ùê„,x+iógÍ2GàÉ.ZŠÔvîÜyˆë=¡TãŒÀ8#Q‚%ºKäó²ÄÑÞCV2?A0½ùMíôBöyS3ž¢3)d0Í.¤ôrB"²äaú¦ÇU¨‡*àsïA†{5eA€ÈãiŽ€ø=ø±GB® Œm–Ôñ·•Û©pÓ²Y­ˆ0+²’y šiO”6UZ˜¶ž3»’y*ÔLJ²œ}Ìô•xËT†Gz Yjýœ}ŸïH;??¿+À¥–"´²²²óà °¾gçÎ ½îªU«Ö¶5KMÖ‡µs  –R’ùVZZ‹aC.³õwê~ÈýàžëdRÃD•7|t[ùÞ÷ãÇ›ShÛÉ…ë<8E&C-]ºt·»»{%#Áe#™e$E”§èbj’Ég8T8"#Y&Aðsó›°jZ‰¹3ˆ^šÙ¤æ¬¡¾î„d)êŠ0÷ |ï{ß¾}w1°ã@5x¬{‚‚‚JMyÞüñïWźŒ5vòdqe?%K2KÉTy–òß,ÒøÚB²  y=æI±­ZE%o˜õÈԼ׌0=Ž0;FŸÁÔ§Ö“|ôÍíHû,ŸCÁö~EEE¾[Gk†Ê¿“õé˜Ûê-PÃÂòxÌàŠ dxš^˜Î=¶ãC+Í—E# W+ÆÌgË„䪜ԧ„’̸ Ò?¹fZgzìÚµë•êÝñÉ'Ÿ°Y? b˜Ê®ÿ¢a|l$´%H?V€D"ùߦ›ùÙЈn'ˆxÜK‡Ò.ȆwÞªðtÜÇ5“A>è>4}Wó¡%ÈeÆ @8’+›JD%«,Ehðžl„V}Æ­Þã)£¯Eãêo=±ìÿF²È€Ó¦ôRÖI!i¸¬¼¾è~I–¸2gû è}ÇÔú/BW|ñÅ›,¥‡ ê×`~/w÷üùó-EhPØË–鬒NŽjõV¨ÆâZ.}:2À9ÙÇzñ4EÓÍÖz;_"#îô]›§#©´†[ù]Ï{`mìT¸jrMuâ!¿]ýÖ[om¸ÙUs Šq]¼xñÀ¬Y³Iòú\ñññùB i™ÊѾýö[Æ ¢B©(•½›ßê1´—≔SÇ}©C[+ÚK¤-Ü…U“Ë©¹¯Š$c«õcæ}Ù¸ð<¹·ÓSÜJ:7,³ ³Ëæ²ÉíÛ·¿ð ÝÜÜ*~Žh2ßf2-9räQÆ÷‰ä³n­þeôJÜq„FÖTdëãoÍk}æeO‚V( ’• 2³«d³böƒxá6)g!ËÛÛ»xÙ²e«ÁÍ,6·b&ž¥Ë¦~–W^yÅìy•••ç¡íê:ËÌŠí0«ºÕ íµÄã|ž!„z>Gº± úà‰ êårÕoͨz$ë4–K^Ž;( wÏ5'Ff¸âââÎ>|˜ÌÈ,7—H.]ºt®wÜϓžž~˜XXæ|ŽË—/ïë°‚±¨vR¤(”le/Ä+²œÇ}É!²íH­EY€ìŸIû‘„,¸u¿*ïîŸ]W,Ed¤hâe[Aû0ñÒÌ%³»wïžË좹Ÿ Z¥Žši)ÖÿöÛoy`ÕÐv @ÿäè¾* Åç£H¸ ŠÇ}9ý4™Æ¶¤ÖB8Pä’‹ddv¿ß#é¤ðí }ùE¥È,++ë4ŽoK¨ÌEèõ¼1³#–ølsæÌ1«Õ Èð.dU°¨mäÂl… Qh¢ ÛÃó¹à¼ÎÖ)ëx×àATB[S!’“)šzÞ«l+HÌ[‚(àÙÔ•¿uëÖ-PJq‘² DbÄá™û¡˜÷ÊÕ«W·™™8Ì$ËóP6.B4ƒSä}‚v²öœþ¹Š¡Í¦uH­Q¶‡ÿF$)A£Îjå©>xº#¬ËËËë.ˆ4î8ÙVSäx¸C‚O÷êÕë¢Ȭ`ïÞ½$–WaNËÓ?þ¸ ÊOèÝMGõ9é‡iDChéþ{xÞŸ{X‹ÖŠJ¥Z#S£~ɪDÂâ±÷¼×èf &†ÀÀÀ ųwÍš5¹`íì™,ѾÄP4{\Í|ø3™È ¨jêÔ©; w™Û)Ÿ§È•QcLÞÉm‡ÃOYå" 'ÔÂÀšÝ<ïÓ¸ÇáÈ,­W3šl#›¹I‹Kˆ±G‘ÒQ}^1„††€u²š¼OP6ÀÇÏ¿ýöÛû!¹PcFàd^^Þ¢Ža‰ÏtæÌ™}žžžLYà و°í¢yÎK2JîZ¾b›€ÐâQÄAj¿YkZõyp«j¼í‹n7ª?NxTØŸ†Fòmª²ç*Z-?uêÔ>qwÞT"+ò:Heò=nÁÏUöúë¯3Zµ ­ê¢ôýTÑ I‰ùBøL—xÞ³* ´nÈ&≫ £-6" 3¡t›ýë%J­ú*Ï¢Ø[0òm‹‰ó3yó«^Þ¾}ûN|ä~jM!2xÝÃ×^{mû­[·öZB™¶9ŠŠŠò;uêt›uŠúÒŒrÑ<ã×NB‡o=á#H ¤#“ˆ3?[aÔª«O¨@"k1ˆö‘R§á5nÞ¼ysÙÉ€ öÑ-[¶ìxÿý÷w”õž1cÆäwëÖí¼««k¥™AÿÂM›6å€%uÛ Vãƒ÷Þ{o/K,¯R684OLÏX:1j¥'ÙYøÝEÄi­­ µÖ†lF2k$´Ÿ³ë•xÔíß¿ƒ% cï¸ÜÁ2KÕ»5­ÌÌÌ#.\ØAâöÖ0¡Tãhxx8sy‹V}EòvòYe±3œ <ïa -ê‚ìцJ;·¥¨nB°ô’¤ôã¼pmtŸâû÷ï°„‘îä^˜ºTii2ƒÀÿ Ðøßi Òe°Î*@Ìq[¦U貇LYO;£¬QT€ï}¼¿_2‡ˆA†?Àƒ¢Û(w·ûL¨ìù(N 
tó¯AY†ÙYMRé?xðà³–&³°°°‹ÐW¹ÍZe#ÖÙ±èèh¶þÑJÙ€`Qí'ɇiWˆÕ(à^Þ ­#²FÛÈ‚¾Á4ÚLúzÂA{%4éÇéw¡‘ú!¡‚ÜÏ ”`\ò¹gaB«ÿàƒ6uߊ×ëÛËZ§í›É'Dõl§Fäz¶-,L…âüŽ~Èm‡ÔLÊPÝ}ÖaY¦ýäþ’ ³JYK#t:]iyy¹ÙÃE ëxÊÝݽڒ„“œ®‚õ´ËŠ™W2®îÔß±N”‚Iñâr7É®“§øn?#ëÄK‘)Ú±}Ì8¥§g€Ý% äCŽsôgVáÞf!B«±™A#uÕ‚ 6ƒu×ZdIŒ’W_}u?Çg»éðU;ÐrTà=݆jmЇçÌ¢ÀZ-ùgâQ»Év~Öí¸&¬MßË—/_-&B *‚Ž…ˆËZÓ:ƒ¬é~ÈÈ–²~[›»©×@S«n ¼¯Ë aóoÈm7ø„Ñ€¾PÑfÙP¥Ÿî ÛföÙgw™[a¡ÕûúúÞ…`|aFFÆ©I“&í_¸páö;väBîike4›%1nÏœ9ó0gìÉßy¸›e"r7@Ae—ÐÙ`‰¿Gâ/È mÄ̆z“ñaûB|ä×^ í:Û9½ËeŽêû8àÍ%4Pxe%4 ¼’Ù³gïÌÏÏßI„$ Oµ 3b/”¬<àj—}‡˜žåÓ_ö¸ îf¾Ðä ÎhÖÚ`±OÁ¶Þ0”¬…Fu“í÷‡ |³†ôB-ÛEpiª&AÕ‚`âîF5Œjª•Q[[{cüøñÜ1(p7%o‰,»9=:j-…f“Ëaÿ?ƒ,ÐþðÎ`<þí¤‡c8k¹ä_IÇÚUràÓnå`{¬‡ÌfĤîvéÒŨQFÁí2;+2#zgׇ Âghq•|@È6Q=Çw’/*y«k4_»à‹\‰'¿ý—xŒfÝ*å]‡Ó.·RS»°v¼øâ‹Ì ܳgÏkͯçççwãܹs¹Öð Äïö€uÆ-õ­Q_•.H>#ªÚ³^AÛMQ 2û3œöEl?rmî§u¿Õæ;^Œ»Î1(øÔf™:ñ¼²†-²©£Fʳ†d·ugw@Ù–× ±¹›’Ó¯)ÝO˜`Õ¡ÍÀSngÉdÿ®è9Ž q¦e›|(–Aç€Nsíwܹs§ÉÉï¾ûîp³D@UNNÎz+k¬ ÂÅ‹@«W/w³¿ˆÜMÈÂËŸ ÝÉsBº‘Ê ;Úw64¨ùàcÚ¥VK¶MWTÖ/„ÕíLJJ:lj&òرcO ´ÏîA)ÇN±¸Ä¥/½ô?u º@Lî¦ä«î·”^N‡L,T> <ÙHl)¤«€=Ʀº#y+él›²Ò>ïþˆ£s J0öšB ‡}jÑjç\„Ìç)±$<ŽÁgº#ÀÝ,͗М®ù”iâgD•O4¢)¾6€GUv¹tbä¾6SÂè|†Côñ!dO˜’HLLÔ×»õéÓg¿™ÓË->[½zõN(!©ç™ÝM1­ôçì*yŒçvèOúPIâ O1‚ŽØžçõ˜à½ÓaYv¨“Ó¢¯ðV<$´6íÛo¿Õ»Ðô½ÑÌù˜MŒ=ú$¯çCGĤL+ù(í²ÊIsÎDwó2x'<½6b{‘×ÁpwïVf‚Ú y¿c\­Oæ¬áÇoS«éxxë­·vó©×Òמ‰HèSö\§<“ö ¸Î°þÛîïxòÖŽ³¹ÃfÛd2ihT·]óeýƒJ^O¸ ’@@¼‘þP,É  žê®l|Ä)EœO¾R§)´‘шE®KB`РAœ ݤÎKú^ÊÑXgK2K~Î{M¼ÿÄúŽÀÓ†°µÕ6U\èR)+ÀE¹¡tT_uÚbsþ4ë.¬µ°ÁúÖ!®ÊuJ×ܸq£5ãk ³}$Œ¸5õ²X¯\R‘/B{-á”B£¼nJLîû»Øê„h5ÂG°Ü>å[úÑ ë$|¾ì4õLÑÀûÓôz²@‡ë´Qi…ìæ¥pÆ ü¯É^Ž?"ª!(Y$Pk³:Ï£ ž*„(Ð8~ï ‘[1‰ÿuà1-¨qÍWLׂJý˜Ðn+îG0šo/ ©ãì rÙ.¦Fô§¿ì~Û„™›MƒP’dž$„XÉmQcƒqk)uxÔZ&bÑ.ºëÂØºóÖž·YTTt4**ª˜‡›~W2)"OT}¶3»˜2…¬‹ànÆáÉAˆÿv ц·™ÁáxT7¤æ|n¸†?ÐA÷;v,lœ¿iQ€„Ñ¥3fœâõ{‚ƘânÅ¢RÖèâ¹Ãe âž.„õ_xZm1¡H‚¿ðç)K[e`]ýfÁÛüÕRŸˆÑ‘Äwfu–ž?~‡%È f}žŸ7o^>Ü^¿«l ¸J5¤¤]Té4¦´¤‘˜[ ž D»¸…j2wò"#tª6,"ø\£·5c0pm§ÆÏÇ4¤eÄÙLqGk òGŒÁŸà5'¦]`@v½ì™Àí&„Êà¹ï€SÑí...ÿN¾µÉ€Y¢ºë³Æa0k`m†•ëKø÷á@4Þ6¶0ÝI”öBÙAÕÂ… ·—––#ådl‚’PrdÑ¢E{`&h™KTÖ+(—LRM©ÆgÝ®+\µÇLP¤ýd›qÇ#­ кá3–- à.Ì÷ÌÿüóÏ÷,^¼x?ˆHî×òŒÍ»äW'˜<@õä“ôB1 A‘ %…&`õÄ„@ˆ`e ±Yö–hž î”'¦ØXg…& A© ‰b‰ãB D8¤oZ›ÐäÞN$‹ºßUìl°¾oSˆ*-É‚þ ¡îBÜø ÖUÖ±ÎT7%“£v‹j 
×»)ç¡ÏVh¶úXµQ¸Uˆ6ÒÆ–5a5(BÝv’ÆoÑÚ÷™”ñ>$³Y#°{cDÌj"m' ã“8àTÔÐiÎH^‰;&ªDÀ¨Î»`hõ-¿I,H$ÿ‹;hÛñµÉf$JäÏïS™†ôͤÓJgí ¿éï]®¦wÑ~ˆí¡Ù@y‚Ïv‡Åe¢±Î  WÑÑe·€'" ´„ô÷â@ ÚˆËÅÔJehÕÈ#=¶J¾êqS<%éEŠÎn»ÈÒ[ +Ÿ<ÑŽîWGZ™jGÕÙÈλˆ ­˜æFxy”ÇòPÄ2[È!ø´ûqCG’äÃÔK¢’z+ù´"Äm—2#ÜR 4?|„‚Ì¢lN ²±a»D @['™¹W`Ÿ&!½¯È |ª„ƒèé7Žq{2Pº ùL«Xeï§^§øn8àÙÛD™Ÿ&hН) sɇڤ‡Æ^ƒ¹ÛÈ4)YbÅFmÁä†ú q-4Ù ±ù%±o{Þ•N‰Ü+ñÜÞHdBTgëHðV<>1Á $º#¢+ï´]6«Ë½¤Ð²ìZN[–UVX!E2 XîžG2«&Hg7Mjz Í„)Ä6šg#û=IéÔ§‰EAý£äï0öîrc»Rƒ™íXDàòcX‘øT„¹Ä6ߤdÊ*kÖœãïø$„Å@f1Ô"3I陵ùf㨹¿áG V¼Q`ò$ Ò «ûdì#™Öt¨q^Ãï¤&®qŽÃ2¬uÿD«ÚþH*ÂCa%)£Æÿï&•Jµd^*Þ)@ @ @ @ @ @ @ @ @ @ @ Cüì. eIEND®B`‚pontos-25.3.2/docs/index.md000066400000000000000000000005041476255566300155300ustar00rootroot00000000000000# Pontos - Greenbone Python Utilities and Tools The pontos Python package is a collection of utilities, tools, classes and functions maintained by Greenbone Networks. ## User Guide This sections explains how to install **Pontos** and shows example use cases. ```{toctree} :maxdepth: 2 install tools development api ``` pontos-25.3.2/docs/install.md000066400000000000000000000013421476255566300160700ustar00rootroot00000000000000(installation)= # Installation of pontos ## Requirements Python 3.9 and later is supported. ## Using pip pip 19.0 or later is required. You can install the latest stable release of **pontos** from the Python Package Index (pypi) using [pip](https://pip.pypa.io/en/stable/) ```shell python3 -m pip install --user pontos ``` ## Using poetry Because **pontos** is a Python library you most likely need a tool to handle Python package dependencies and Python environments. Therefore we strongly recommend using [poetry](https://python-poetry.org/). 
You can install the latest stable release of **pontos** and add it as a dependency for your current project using [poetry](https://python-poetry.org/) ```shell poetry add pontos ``` pontos-25.3.2/docs/make.bat000066400000000000000000000014401476255566300155040ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org/ exit /b 1 ) if "%1" == "" goto help %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd pontos-25.3.2/docs/pontos/000077500000000000000000000000001476255566300154225ustar00rootroot00000000000000pontos-25.3.2/docs/pontos/changelog.md000066400000000000000000000005551476255566300177000ustar00rootroot00000000000000# pontos.changelog package ```{eval-rst} .. automodule:: pontos.changelog :members: :exclude-members: ChangelogError,ChangelogBuilderError .. autoexception:: ChangelogError :show-inheritance: :exclude-members: __init__,__new__ .. autoexception:: ChangelogBuilderError :show-inheritance: :exclude-members: __init__,__new__ ``` pontos-25.3.2/docs/pontos/cpe.md000066400000000000000000000001201476255566300165040ustar00rootroot00000000000000# pontos.cpe package ```{eval-rst} .. automodule:: pontos.cpe :members: ``` pontos-25.3.2/docs/pontos/errors.md000066400000000000000000000001261476255566300172570ustar00rootroot00000000000000# pontos.errors package ```{eval-rst} .. 
automodule:: pontos.errors :members: ``` pontos-25.3.2/docs/pontos/git.md000066400000000000000000000003201476255566300165220ustar00rootroot00000000000000# pontos.git package ```{eval-rst} .. automodule:: pontos.git :members: :exclude-members: GitError .. autoexception:: GitError :show-inheritance: :exclude-members: __init__,__new__ ``` pontos-25.3.2/docs/pontos/github.md000066400000000000000000000001751476255566300172310ustar00rootroot00000000000000# pontos.github package ## Submodules ```{toctree} :maxdepth: 1 github/actions github/api github/models github/script ``` pontos-25.3.2/docs/pontos/github/000077500000000000000000000000001476255566300167045ustar00rootroot00000000000000pontos-25.3.2/docs/pontos/github/actions.md000066400000000000000000000001461476255566300206670ustar00rootroot00000000000000# pontos.github.actions package ```{eval-rst} .. automodule:: pontos.github.actions :members: ``` pontos-25.3.2/docs/pontos/github/api.md000066400000000000000000000001361476255566300177770ustar00rootroot00000000000000# pontos.github.api package ```{eval-rst} .. automodule:: pontos.github.api :members: ``` pontos-25.3.2/docs/pontos/github/models.md000066400000000000000000000020501476255566300205060ustar00rootroot00000000000000# pontos.github.models package - [Artifact](#artifact) - [Base](#base) - [Branch](#branch) - [Organization](#organization) - [Pull Request](#pull-request) - [Release](#release) - [Search](#search) - [Tag](#tag) - [Workflow](#workflow) ## Artifact ```{eval-rst} .. automodule:: pontos.github.models.artifact :members: ``` ## Base ```{eval-rst} .. automodule:: pontos.github.models.base :members: ``` ## Branch ```{eval-rst} .. automodule:: pontos.github.models.branch :members: ``` ## Organization ```{eval-rst} .. automodule:: pontos.github.models.organization :members: ``` ## Pull Request ```{eval-rst} .. automodule:: pontos.github.models.pull_request :members: ``` ## Release ```{eval-rst} .. 
automodule:: pontos.github.models.release :members: ``` ## Search ```{eval-rst} .. automodule:: pontos.github.models.search :members: ``` ## Tag ```{eval-rst} .. automodule:: pontos.github.models.tag :members: ``` ## Workflow ```{eval-rst} .. automodule:: pontos.github.models.workflow :members: ``` pontos-25.3.2/docs/pontos/github/script.md000066400000000000000000000001441476255566300205310ustar00rootroot00000000000000# pontos.github.script package ```{eval-rst} .. automodule:: pontos.github.script :members: ``` pontos-25.3.2/docs/pontos/helper.md000066400000000000000000000001261476255566300172220ustar00rootroot00000000000000# pontos.helper package ```{eval-rst} .. automodule:: pontos.helper :members: ``` pontos-25.3.2/docs/pontos/models.md000066400000000000000000000003621476255566300172300ustar00rootroot00000000000000# pontos.models package ```{eval-rst} .. automodule:: pontos.models :members: :exclude-members: ModelError :no-special-members: .. autoexception:: ModelError :show-inheritance: :exclude-members: __init__,__new__ ``` pontos-25.3.2/docs/pontos/nvd.md000066400000000000000000000002321476255566300165300ustar00rootroot00000000000000# pontos.nvd package ## Submodules ```{toctree} :maxdepth: 1 nvd/cpe nvd/cve nvd/models ``` ```{eval-rst} .. automodule:: pontos.nvd :members: ``` pontos-25.3.2/docs/pontos/nvd/000077500000000000000000000000001476255566300162115ustar00rootroot00000000000000pontos-25.3.2/docs/pontos/nvd/cpe.md000066400000000000000000000001301476255566300172740ustar00rootroot00000000000000# pontos.nvd.cpe package ```{eval-rst} .. automodule:: pontos.nvd.cpe :members: ``` pontos-25.3.2/docs/pontos/nvd/cve.md000066400000000000000000000001301476255566300173020ustar00rootroot00000000000000# pontos.nvd.cve package ```{eval-rst} .. 
automodule:: pontos.nvd.cve :members: ``` pontos-25.3.2/docs/pontos/nvd/models.md000066400000000000000000000012711476255566300200170ustar00rootroot00000000000000# pontos.nvd.models package ## Subpackages - [pontos.nvd.models.cpe](#pontosnvdmodelscpe) - [pontos.nvd.models.cve](#pontosnvdmodelscve) - [pontos.nvd.models.cvss\_v2](#pontosnvdmodelscvss_v2) - [pontos.nvd.models.cvss\_v3](#pontosnvdmodelscvss_v3) ### pontos.nvd.models.cpe ```{eval-rst} .. automodule:: pontos.nvd.models.cpe :members: ``` ### pontos.nvd.models.cve ```{eval-rst} .. automodule:: pontos.nvd.models.cve :members: ``` ### pontos.nvd.models.cvss_v2 ```{eval-rst} .. automodule:: pontos.nvd.models.cvss_v2 :members: ``` ### pontos.nvd.models.cvss_v3 ```{eval-rst} .. automodule:: pontos.nvd.models.cvss_v3 :members: ``` pontos-25.3.2/docs/pontos/release.md000066400000000000000000000001301476255566300173560ustar00rootroot00000000000000# pontos.release package ```{eval-rst} .. automodule:: pontos.release :members: ``` pontos-25.3.2/docs/pontos/terminal.md000066400000000000000000000001321476255566300175530ustar00rootroot00000000000000# pontos.terminal package ```{eval-rst} .. automodule:: pontos.terminal :members: ``` pontos-25.3.2/docs/pontos/testing.md000066400000000000000000000001301476255566300174130ustar00rootroot00000000000000# pontos.testing package ```{eval-rst} .. automodule:: pontos.testing :members: ``` pontos-25.3.2/docs/pontos/version.md000066400000000000000000000005171476255566300174340ustar00rootroot00000000000000# pontos.version package ## Submodules ```{toctree} :maxdepth: 1 version/commands version/project version/schemes version/helper ``` ```{eval-rst} .. automodule:: pontos.version :members: :exclude-members: VersionError .. 
autoexception:: VersionError :show-inheritance: :exclude-members: __init__,__new__ ``` pontos-25.3.2/docs/pontos/version/000077500000000000000000000000001476255566300171075ustar00rootroot00000000000000pontos-25.3.2/docs/pontos/version/commands.md000066400000000000000000000001521476255566300212300ustar00rootroot00000000000000# pontos.version.commands package ```{eval-rst} .. automodule:: pontos.version.commands :members: ``` pontos-25.3.2/docs/pontos/version/helper.md000066400000000000000000000001461476255566300207110ustar00rootroot00000000000000# pontos.version.helper package ```{eval-rst} .. automodule:: pontos.version.helper :members: ``` pontos-25.3.2/docs/pontos/version/project.md000066400000000000000000000001501476255566300210730ustar00rootroot00000000000000# pontos.version.project package ```{eval-rst} .. automodule:: pontos.version.project :members: ``` pontos-25.3.2/docs/pontos/version/schemes.md000066400000000000000000000001501476255566300210540ustar00rootroot00000000000000# pontos.version.schemes package ```{eval-rst} .. automodule:: pontos.version.schemes :members: ``` pontos-25.3.2/docs/tools.md000066400000000000000000000107771476255566300155760ustar00rootroot00000000000000(tools)= # Tools and Utilities {program}`pontos` comes with a continuously increasing set of features. The following commands are currently available: - [pontos-release](#pontos-release) - [pontos-version](#pontos-version) - [pontos-update-header](#pontos-update-header) - [pontos-changelog](#pontos-changelog) - [pontos-github](#pontos-github) - [pontos-github-script](#pontos-github-script) - [pontos-nvd-cve](#pontos-nvd-cve) - [pontos-nvd-cves](#pontos-nvd-cves) - [pontos-nvd-cpe](#pontos-nvd-cpe) - [pontos-nvd-cpes](#pontos-nvd-cpes) ## pontos-release `pontos-release` - Release handling utility for C/C++ (CMake), JavaScript/TypeScript, Golang and Python Projects. 
:::{note} We also provide easy-to-use [GitHub Action](https://github.com/greenbone/actions/), to create [releases ](https://github.com/greenbone/actions/tree/main/release) that we recommended to use instead of manually releasing with pontos-release. ::: ```shell # Release the next patch version (x.x.1) of project , using conventional # commits for release notes, pushes the changes and release notes pontos-release release --project --release-type patch # Sign a release: pontos-release sign --project --release-version 1.2.3 --signing-key 1234567890ABCDEFEDCBA0987654321 [--passphrase ] ``` ## pontos-version `pontos-version` - Version handling utility for C, Go and Python Projects ```shell # Update version of this project to 22.1.1 pontos-version update 22.1.1 # Show current projects version pontos-version show # Verify the current version information pontos-version verify current # calculate the next minor release version pontos-version next minor ``` **Supported config files:** * CMake: `CMakeLists.txt` * Python: `pyproject.toml` and an arbitrary version module * Golang: `go.md` and `version.go` * JavaScript/TypeScript: `package.json`, `src/version.js` and `src/version.ts` ## pontos-update-header `pontos-update-header` - Handling Copyright header for various file types and licenses :::{note} We also provide easy-to-use [GitHub Actions](https://github.com/greenbone/actions/#usage), that updates copyright year in header of files and creates a Pull Request. 
::: ```shell # Update year in Copyright header in files based on last commit in corresponding repo, also add missing headers pontos-update-header -d ``` **Supported files:** `.bash` `.c` `.h` `.go` `.cmake` `.js` `.nasl` `.po` `.py` `.sh` `.ts` `.tsx` `.txt` `.xml` `.xsl` **Supported licenses:** `AGPL-3.0-or-later` `GPL-2.0-only` `GPL-2.0-or-later` `GPL-3.0-or-later` **Copyright header schema:** `Copyright (C) 2020-2022 Greenbone AG` ## pontos-changelog `pontos-changelog` - Parse conventional commits in the current branch and create a changelog from the commit messages. ```shell # Parse conventional commits and create pontos-changelog -o # Parse conventional commits between git tag 1.2.3 and 2.0.0 and print changelog to the console pontos-changelog --current-version 1.2.3 --next-version 2.0.0 ``` ## pontos-github `pontos-github` - Handling GitHub operations, like Pull Requests (beta) ```shell # create a PR on GitHub pontos-github pr create [--body ] # update a PR on GitHub pontos-github pr update [--target ] [--title ] [--body ] # get modified and deleted files in a PR, store in file test.txt pontos-github FS -s modified deleted -o test.txt # add labels to an Issue/PR pontos-github L label1 label2 ``` ## pontos-github-script `pontos-github-script` - Run Python scripts for GitHub automation. A number of useful GitHub scripts are available in the [pontos repository](https://github.com/greenbone/pontos/tree/main/pontos/github/scripts). 
```sh # List all members of a GitHub Organization pontos-github-script --token scripts/github/members.py ``` ## pontos-nvd-cve `pontos-nvd-cve` - Get information about a single CVE ```shell # query a cve pontos-nvd-cve CVE-2021-38397 ``` ## pontos-nvd-cves `pontos-nvd-cves` - Search for specific CVEs ```shell # get all cves with a specific keyword pontos-nvd-cves --keywords mac apple ``` ## pontos-nvd-cpe `pontos-nvd-cpe` - Get information about a single CPE ```shell pontos-nvd-cpe "9F5DB8E0-14E4-40EC-B567-CF1108EEE735" ``` ## pontos-nvd-cpes `pontos-nvd-cpes` - Search for specific CPEs ```shell # get all cpes for a specific keyword pontos-nvd-cpes --keywords macos ``` pontos-25.3.2/poetry.lock000066400000000000000000004306721476255566300153600ustar00rootroot00000000000000# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] [[package]] name = "anyio" version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] doc = ["Sphinx 
(>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] name = "autohooks" version = "25.2.0" description = "Library for managing git hooks" optional = false python-versions = "<4.0,>=3.9" groups = ["dev"] files = [ {file = "autohooks-25.2.0-py3-none-any.whl", hash = "sha256:5a2ed5fc03d899725638d2ca6bcb367f025a6196b225a5ea177e502fdf1bb065"}, {file = "autohooks-25.2.0.tar.gz", hash = "sha256:e306a24801fbc815628ebe48b063e2853a0857eb00bbdae733175e06e0ce8a84"}, ] [package.dependencies] pontos = ">=22.8.0" rich = ">=12.5.1" shtab = ">=1.7.0" tomlkit = ">=0.5.11" [[package]] name = "autohooks-plugin-black" version = "23.10.0" description = "An autohooks plugin for python code formatting via black" optional = false python-versions = ">=3.9,<4.0" groups = ["dev"] files = [ {file = "autohooks_plugin_black-23.10.0-py3-none-any.whl", hash = "sha256:88d648251df749586af9ea5be3105daa4358ed916b61aee738d0727387214470"}, {file = "autohooks_plugin_black-23.10.0.tar.gz", hash = "sha256:8415b5f566d861236bde2b0973699f64a8b861208af4fa05fe04a1f923ea3ef6"}, ] [package.dependencies] autohooks = ">=21.6.0" black = ">=20.8" [[package]] name = "autohooks-plugin-mypy" version = "23.10.0" description = "An autohooks plugin for python code static typing check with mypy" optional = false python-versions = ">=3.9,<4.0" groups = ["dev"] files = [ {file = "autohooks_plugin_mypy-23.10.0-py3-none-any.whl", hash = "sha256:8ac36b74900b2f2456fec046126e564374acd6de2752d87255c6f71c4e6a73ff"}, {file = "autohooks_plugin_mypy-23.10.0.tar.gz", hash = 
"sha256:ebefaa83074b662de38c914f6cac9f4f8e3452e36f54a5834df3f1590cc0c540"}, ] [package.dependencies] autohooks = ">=21.7.0" mypy = ">=0.910" [[package]] name = "autohooks-plugin-ruff" version = "25.2.0" description = "An autohooks plugin for python code formatting via ruff" optional = false python-versions = "<4.0,>=3.9" groups = ["dev"] files = [ {file = "autohooks_plugin_ruff-25.2.0-py3-none-any.whl", hash = "sha256:b0e3bbfb2d8bb94c509dad291801af1f6821877147b7f02f7e93812f8aef6ba1"}, {file = "autohooks_plugin_ruff-25.2.0.tar.gz", hash = "sha256:3290cd5b1939d80113e5a3b6408e4728e1c2bed0960b8101479897a196dc69d6"}, ] [package.dependencies] autohooks = ">=25.2.0" ruff = ">=0.0.272" [[package]] name = "babel" version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "beautifulsoup4" version = "4.13.3" description = "Screen-scraping library" optional = false python-versions = ">=3.7.0" groups = ["dev"] files = [ {file = "beautifulsoup4-4.13.3-py3-none-any.whl", hash = "sha256:99045d7d3f08f91f0d656bc9b7efbae189426cd913d830294a15eefa0ea4df16"}, {file = "beautifulsoup4-4.13.3.tar.gz", hash = "sha256:1bd32405dacc920b42b83ba01644747ed77456a65760e285fbc47633ceddaf8b"}, ] [package.dependencies] soupsieve = ">1.2" typing-extensions = ">=4.0.0" [package.extras] cchardet = ["cchardet"] chardet = ["chardet"] charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] [[package]] name = "black" version = "25.1.0" 
description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash 
= "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" groups = ["main", "dev"] files = [ {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] name = "charset-normalizer" version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, {file = 
"charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, {file = 
"charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, {file = 
"charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions 
= ">=3.7" groups = ["dev"] files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] markers = {main = "platform_system == \"Windows\""} [[package]] name = "colorful" version = "0.5.6" description = "Terminal string styling done right, in Python." 
optional = false python-versions = "*" groups = ["main"] files = [ {file = "colorful-0.5.6-py2.py3-none-any.whl", hash = "sha256:eab8c1c809f5025ad2b5238a50bd691e26850da8cac8f90d660ede6ea1af9f1e"}, {file = "colorful-0.5.6.tar.gz", hash = "sha256:b56d5c01db1dac4898308ea889edcb113fbee3e6ec5df4bacffd61d5241b5b8d"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "coverage" version = "7.6.12" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = 
"sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash 
= "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = 
"sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.extras] toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "docutils" version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main", "dev"] markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "furo" version = "2024.8.6" description = "A clean customisable Sphinx documentation theme." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, ] [package.dependencies] beautifulsoup4 = "*" pygments = ">=2.7" sphinx = ">=6.0,<9.0" sphinx-basic-ng = ">=1.0.0.beta2" [[package]] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" groups = ["main", "dev"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] [[package]] name = "h2" version = "4.2.0" description = "Pure-Python HTTP/2 protocol implementation" optional = false python-versions = ">=3.9" groups = ["main"] files = [ {file = "h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"}, {file = "h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"}, ] [package.dependencies] hpack = ">=4.1,<5" hyperframe = ">=6.1,<7" [[package]] name = "hpack" version = "4.1.0" description = "Pure-Python HPACK header encoding" optional = false python-versions = ">=3.9" groups = ["main"] files = [ {file = "hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"}, {file = "hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"}, ] [[package]] name = "httpcore" version = "1.0.7" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" groups = ["main"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, ] [package.dependencies] certifi = "*" h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] anyio = "*" certifi = "*" h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} httpcore = "==1.*" idna = "*" [package.extras] brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "hyperframe" version = "6.1.0" description = "Pure-Python HTTP/2 framing" optional = false python-versions = ">=3.9" groups = ["main"] files = [ {file = "hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"}, {file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"}, ] [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" groups = ["main", "dev"] files = [ {file = 
"idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["dev"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] name = "importlib-metadata" version = "8.6.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" groups = ["dev"] markers = "python_version < \"3.10\"" files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, ] [package.dependencies] zipp = ">=3.20" [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "jinja2" version = "3.1.6" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "lxml" version = "5.3.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.6" groups = ["main"] files = [ {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4058f16cee694577f7e4dd410263cd0ef75644b43802a689c2b3c2a7e69453b"}, {file = "lxml-5.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:364de8f57d6eda0c16dcfb999af902da31396949efa0e583e12675d09709881b"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:528f3a0498a8edc69af0559bdcf8a9f5a8bf7c00051a6ef3141fdcf27017bbf5"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db4743e30d6f5f92b6d2b7c86b3ad250e0bad8dee4b7ad8a0c44bfb276af89a3"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b5d7f8acf809465086d498d62a981fa6a56d2718135bb0e4aa48c502055f5c"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:928e75a7200a4c09e6efc7482a1337919cc61fe1ba289f297827a5b76d8969c2"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a997b784a639e05b9d4053ef3b20c7e447ea80814a762f25b8ed5a89d261eac"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7b82e67c5feb682dbb559c3e6b78355f234943053af61606af126df2183b9ef9"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = 
"sha256:f1de541a9893cf8a1b1db9bf0bf670a2decab42e3e82233d36a74eda7822b4c9"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:de1fc314c3ad6bc2f6bd5b5a5b9357b8c6896333d27fdbb7049aea8bd5af2d79"}, {file = "lxml-5.3.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7c0536bd9178f754b277a3e53f90f9c9454a3bd108b1531ffff720e082d824f2"}, {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:68018c4c67d7e89951a91fbd371e2e34cd8cfc71f0bb43b5332db38497025d51"}, {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa826340a609d0c954ba52fd831f0fba2a4165659ab0ee1a15e4aac21f302406"}, {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:796520afa499732191e39fc95b56a3b07f95256f2d22b1c26e217fb69a9db5b5"}, {file = "lxml-5.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3effe081b3135237da6e4c4530ff2a868d3f80be0bda027e118a5971285d42d0"}, {file = "lxml-5.3.1-cp310-cp310-win32.whl", hash = "sha256:a22f66270bd6d0804b02cd49dae2b33d4341015545d17f8426f2c4e22f557a23"}, {file = "lxml-5.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:0bcfadea3cdc68e678d2b20cb16a16716887dd00a881e16f7d806c2138b8ff0c"}, {file = "lxml-5.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e220f7b3e8656ab063d2eb0cd536fafef396829cafe04cb314e734f87649058f"}, {file = "lxml-5.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f2cfae0688fd01f7056a17367e3b84f37c545fb447d7282cf2c242b16262607"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67d2f8ad9dcc3a9e826bdc7802ed541a44e124c29b7d95a679eeb58c1c14ade8"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db0c742aad702fd5d0c6611a73f9602f20aec2007c102630c06d7633d9c8f09a"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:198bb4b4dd888e8390afa4f170d4fa28467a7eaf857f1952589f16cfbb67af27"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2a3e412ce1849be34b45922bfef03df32d1410a06d1cdeb793a343c2f1fd666"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b8969dbc8d09d9cd2ae06362c3bad27d03f433252601ef658a49bd9f2b22d79"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5be8f5e4044146a69c96077c7e08f0709c13a314aa5315981185c1f00235fe65"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:133f3493253a00db2c870d3740bc458ebb7d937bd0a6a4f9328373e0db305709"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:52d82b0d436edd6a1d22d94a344b9a58abd6c68c357ed44f22d4ba8179b37629"}, {file = "lxml-5.3.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b6f92e35e2658a5ed51c6634ceb5ddae32053182851d8cad2a5bc102a359b33"}, {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:203b1d3eaebd34277be06a3eb880050f18a4e4d60861efba4fb946e31071a295"}, {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:155e1a5693cf4b55af652f5c0f78ef36596c7f680ff3ec6eb4d7d85367259b2c"}, {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22ec2b3c191f43ed21f9545e9df94c37c6b49a5af0a874008ddc9132d49a2d9c"}, {file = "lxml-5.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7eda194dd46e40ec745bf76795a7cccb02a6a41f445ad49d3cf66518b0bd9cff"}, {file = "lxml-5.3.1-cp311-cp311-win32.whl", hash = "sha256:fb7c61d4be18e930f75948705e9718618862e6fc2ed0d7159b2262be73f167a2"}, {file = "lxml-5.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c809eef167bf4a57af4b03007004896f5c60bd38dc3852fcd97a26eae3d4c9e6"}, {file = "lxml-5.3.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e69add9b6b7b08c60d7ff0152c7c9a6c45b4a71a919be5abde6f98f1ea16421c"}, {file = 
"lxml-5.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4e52e1b148867b01c05e21837586ee307a01e793b94072d7c7b91d2c2da02ffe"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4b382e0e636ed54cd278791d93fe2c4f370772743f02bcbe431a160089025c9"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c2e49dc23a10a1296b04ca9db200c44d3eb32c8d8ec532e8c1fd24792276522a"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4399b4226c4785575fb20998dc571bc48125dc92c367ce2602d0d70e0c455eb0"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5412500e0dc5481b1ee9cf6b38bb3b473f6e411eb62b83dc9b62699c3b7b79f7"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c93ed3c998ea8472be98fb55aed65b5198740bfceaec07b2eba551e55b7b9ae"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:63d57fc94eb0bbb4735e45517afc21ef262991d8758a8f2f05dd6e4174944519"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:b450d7cabcd49aa7ab46a3c6aa3ac7e1593600a1a0605ba536ec0f1b99a04322"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:4df0ec814b50275ad6a99bc82a38b59f90e10e47714ac9871e1b223895825468"}, {file = "lxml-5.3.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d184f85ad2bb1f261eac55cddfcf62a70dee89982c978e92b9a74a1bfef2e367"}, {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b725e70d15906d24615201e650d5b0388b08a5187a55f119f25874d0103f90dd"}, {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a31fa7536ec1fb7155a0cd3a4e3d956c835ad0a43e3610ca32384d01f079ea1c"}, {file = "lxml-5.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3c3c8b55c7fc7b7e8877b9366568cc73d68b82da7fe33d8b98527b73857a225f"}, {file = 
"lxml-5.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d61ec60945d694df806a9aec88e8f29a27293c6e424f8ff91c80416e3c617645"}, {file = "lxml-5.3.1-cp312-cp312-win32.whl", hash = "sha256:f4eac0584cdc3285ef2e74eee1513a6001681fd9753b259e8159421ed28a72e5"}, {file = "lxml-5.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:29bfc8d3d88e56ea0a27e7c4897b642706840247f59f4377d81be8f32aa0cfbf"}, {file = "lxml-5.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c093c7088b40d8266f57ed71d93112bd64c6724d31f0794c1e52cc4857c28e0e"}, {file = "lxml-5.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0884e3f22d87c30694e625b1e62e6f30d39782c806287450d9dc2fdf07692fd"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1637fa31ec682cd5760092adfabe86d9b718a75d43e65e211d5931809bc111e7"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a364e8e944d92dcbf33b6b494d4e0fb3499dcc3bd9485beb701aa4b4201fa414"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:779e851fd0e19795ccc8a9bb4d705d6baa0ef475329fe44a13cf1e962f18ff1e"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4393600915c308e546dc7003d74371744234e8444a28622d76fe19b98fa59d1"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:673b9d8e780f455091200bba8534d5f4f465944cbdd61f31dc832d70e29064a5"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:2e4a570f6a99e96c457f7bec5ad459c9c420ee80b99eb04cbfcfe3fc18ec6423"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:71f31eda4e370f46af42fc9f264fafa1b09f46ba07bdbee98f25689a04b81c20"}, {file = "lxml-5.3.1-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:42978a68d3825eaac55399eb37a4d52012a205c0c6262199b8b44fcc6fd686e8"}, {file = 
"lxml-5.3.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:8b1942b3e4ed9ed551ed3083a2e6e0772de1e5e3aca872d955e2e86385fb7ff9"}, {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:85c4f11be9cf08917ac2a5a8b6e1ef63b2f8e3799cec194417e76826e5f1de9c"}, {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:231cf4d140b22a923b1d0a0a4e0b4f972e5893efcdec188934cc65888fd0227b"}, {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5865b270b420eda7b68928d70bb517ccbe045e53b1a428129bb44372bf3d7dd5"}, {file = "lxml-5.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dbf7bebc2275016cddf3c997bf8a0f7044160714c64a9b83975670a04e6d2252"}, {file = "lxml-5.3.1-cp313-cp313-win32.whl", hash = "sha256:d0751528b97d2b19a388b302be2a0ee05817097bab46ff0ed76feeec24951f78"}, {file = "lxml-5.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:91fb6a43d72b4f8863d21f347a9163eecbf36e76e2f51068d59cd004c506f332"}, {file = "lxml-5.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:016b96c58e9a4528219bb563acf1aaaa8bc5452e7651004894a973f03b84ba81"}, {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82a4bb10b0beef1434fb23a09f001ab5ca87895596b4581fd53f1e5145a8934a"}, {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d68eeef7b4d08a25e51897dac29bcb62aba830e9ac6c4e3297ee7c6a0cf6439"}, {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:f12582b8d3b4c6be1d298c49cb7ae64a3a73efaf4c2ab4e37db182e3545815ac"}, {file = "lxml-5.3.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2df7ed5edeb6bd5590914cd61df76eb6cce9d590ed04ec7c183cf5509f73530d"}, {file = "lxml-5.3.1-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:585c4dc429deebc4307187d2b71ebe914843185ae16a4d582ee030e6cfbb4d8a"}, {file = "lxml-5.3.1-cp36-cp36m-win32.whl", hash = 
"sha256:06a20d607a86fccab2fc15a77aa445f2bdef7b49ec0520a842c5c5afd8381576"}, {file = "lxml-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:057e30d0012439bc54ca427a83d458752ccda725c1c161cc283db07bcad43cf9"}, {file = "lxml-5.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4867361c049761a56bd21de507cab2c2a608c55102311d142ade7dab67b34f32"}, {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dddf0fb832486cc1ea71d189cb92eb887826e8deebe128884e15020bb6e3f61"}, {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bcc211542f7af6f2dfb705f5f8b74e865592778e6cafdfd19c792c244ccce19"}, {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaca5a812f050ab55426c32177091130b1e49329b3f002a32934cd0245571307"}, {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:236610b77589faf462337b3305a1be91756c8abc5a45ff7ca8f245a71c5dab70"}, {file = "lxml-5.3.1-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:aed57b541b589fa05ac248f4cb1c46cbb432ab82cbd467d1c4f6a2bdc18aecf9"}, {file = "lxml-5.3.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:75fa3d6946d317ffc7016a6fcc44f42db6d514b7fdb8b4b28cbe058303cb6e53"}, {file = "lxml-5.3.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:96eef5b9f336f623ffc555ab47a775495e7e8846dde88de5f941e2906453a1ce"}, {file = "lxml-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:ef45f31aec9be01379fc6c10f1d9c677f032f2bac9383c827d44f620e8a88407"}, {file = "lxml-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0611da6b07dd3720f492db1b463a4d1175b096b49438761cc9f35f0d9eaaef5"}, {file = "lxml-5.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2aca14c235c7a08558fe0a4786a1a05873a01e86b474dfa8f6df49101853a4e"}, {file = "lxml-5.3.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ae82fce1d964f065c32c9517309f0c7be588772352d2f40b1574a214bd6e6098"}, {file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7aae7a3d63b935babfdc6864b31196afd5145878ddd22f5200729006366bc4d5"}, {file = "lxml-5.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8e0d177b1fe251c3b1b914ab64135475c5273c8cfd2857964b2e3bb0fe196a7"}, {file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:6c4dd3bfd0c82400060896717dd261137398edb7e524527438c54a8c34f736bf"}, {file = "lxml-5.3.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f1208c1c67ec9e151d78aa3435aa9b08a488b53d9cfac9b699f15255a3461ef2"}, {file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c6aacf00d05b38a5069826e50ae72751cb5bc27bdc4d5746203988e429b385bb"}, {file = "lxml-5.3.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5881aaa4bf3a2d086c5f20371d3a5856199a0d8ac72dd8d0dbd7a2ecfc26ab73"}, {file = "lxml-5.3.1-cp38-cp38-win32.whl", hash = "sha256:45fbb70ccbc8683f2fb58bea89498a7274af1d9ec7995e9f4af5604e028233fc"}, {file = "lxml-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:7512b4d0fc5339d5abbb14d1843f70499cab90d0b864f790e73f780f041615d7"}, {file = "lxml-5.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5885bc586f1edb48e5d68e7a4b4757b5feb2a496b64f462b4d65950f5af3364f"}, {file = "lxml-5.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1b92fe86e04f680b848fff594a908edfa72b31bfc3499ef7433790c11d4c8cd8"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a091026c3bf7519ab1e64655a3f52a59ad4a4e019a6f830c24d6430695b1cf6a"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ffb141361108e864ab5f1813f66e4e1164181227f9b1f105b042729b6c15125"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:3715cdf0dd31b836433af9ee9197af10e3df41d273c19bb249230043667a5dfd"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88b72eb7222d918c967202024812c2bfb4048deeb69ca328363fb8e15254c549"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa59974880ab5ad8ef3afaa26f9bda148c5f39e06b11a8ada4660ecc9fb2feb3"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3bb8149840daf2c3f97cebf00e4ed4a65a0baff888bf2605a8d0135ff5cf764e"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:0d6b2fa86becfa81f0a0271ccb9eb127ad45fb597733a77b92e8a35e53414914"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:136bf638d92848a939fd8f0e06fcf92d9f2e4b57969d94faae27c55f3d85c05b"}, {file = "lxml-5.3.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:89934f9f791566e54c1d92cdc8f8fd0009447a5ecdb1ec6b810d5f8c4955f6be"}, {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8ade0363f776f87f982572c2860cc43c65ace208db49c76df0a21dde4ddd16e"}, {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bfbbab9316330cf81656fed435311386610f78b6c93cc5db4bebbce8dd146675"}, {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:172d65f7c72a35a6879217bcdb4bb11bc88d55fb4879e7569f55616062d387c2"}, {file = "lxml-5.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e3c623923967f3e5961d272718655946e5322b8d058e094764180cdee7bab1af"}, {file = "lxml-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ce0930a963ff593e8bb6fda49a503911accc67dee7e5445eec972668e672a0f0"}, {file = "lxml-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:f7b64fcd670bca8800bc10ced36620c6bbb321e7bc1214b9c0c0df269c1dddc2"}, {file = "lxml-5.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:afa578b6524ff85fb365f454cf61683771d0170470c48ad9d170c48075f86725"}, {file = 
"lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f5e80adf0aafc7b5454f2c1cb0cde920c9b1f2cbd0485f07cc1d0497c35c5d"}, {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dd0b80ac2d8f13ffc906123a6f20b459cb50a99222d0da492360512f3e50f84"}, {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:422c179022ecdedbe58b0e242607198580804253da220e9454ffe848daa1cfd2"}, {file = "lxml-5.3.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:524ccfded8989a6595dbdda80d779fb977dbc9a7bc458864fc9a0c2fc15dc877"}, {file = "lxml-5.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:48fd46bf7155def2e15287c6f2b133a2f78e2d22cdf55647269977b873c65499"}, {file = "lxml-5.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:05123fad495a429f123307ac6d8fd6f977b71e9a0b6d9aeeb8f80c017cb17131"}, {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a243132767150a44e6a93cd1dde41010036e1cbc63cc3e9fe1712b277d926ce3"}, {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92ea6d9dd84a750b2bae72ff5e8cf5fdd13e58dda79c33e057862c29a8d5b50"}, {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2f1be45d4c15f237209bbf123a0e05b5d630c8717c42f59f31ea9eae2ad89394"}, {file = "lxml-5.3.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a83d3adea1e0ee36dac34627f78ddd7f093bb9cfc0a8e97f1572a949b695cb98"}, {file = "lxml-5.3.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3edbb9c9130bac05d8c3fe150c51c337a471cc7fdb6d2a0a7d3a88e88a829314"}, {file = "lxml-5.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2f23cf50eccb3255b6e913188291af0150d89dab44137a69e14e4dcb7be981f1"}, {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:df7e5edac4778127f2bf452e0721a58a1cfa4d1d9eac63bdd650535eb8543615"}, {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:094b28ed8a8a072b9e9e2113a81fda668d2053f2ca9f2d202c2c8c7c2d6516b1"}, {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:514fe78fc4b87e7a7601c92492210b20a1b0c6ab20e71e81307d9c2e377c64de"}, {file = "lxml-5.3.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8fffc08de02071c37865a155e5ea5fce0282e1546fd5bde7f6149fcaa32558ac"}, {file = "lxml-5.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4b0d5cdba1b655d5b18042ac9c9ff50bda33568eb80feaaca4fc237b9c4fbfde"}, {file = "lxml-5.3.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3031e4c16b59424e8d78522c69b062d301d951dc55ad8685736c3335a97fc270"}, {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb659702a45136c743bc130760c6f137870d4df3a9e14386478b8a0511abcfca"}, {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a11b16a33656ffc43c92a5343a28dc71eefe460bcc2a4923a96f292692709f6"}, {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5ae125276f254b01daa73e2c103363d3e99e3e10505686ac7d9d2442dd4627a"}, {file = "lxml-5.3.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76722b5ed4a31ba103e0dc77ab869222ec36efe1a614e42e9bcea88a36186fe"}, {file = "lxml-5.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:33e06717c00c788ab4e79bc4726ecc50c54b9bfb55355eae21473c145d83c2d2"}, {file = "lxml-5.3.1.tar.gz", hash = "sha256:106b7b5d2977b339f1e97efe2778e2ab20e99994cbb0ec5e55771ed0795920c8"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] html-clean = ["lxml_html_clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=3.0.11,<3.1.0)"] [[package]] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. 
Markdown parsing, done right!" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] [package.dependencies] mdurl = ">=0.1,<1.0" [package.extras] benchmarking = ["psutil", "pytest", "pytest-benchmark"] code-style = ["pre-commit (>=3.0,<4.0)"] compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] linkify = ["linkify-it-py (>=1,<3)"] plugins = ["mdit-py-plugins"] profiling = ["gprof2dot"] rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = 
"sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "mdit-py-plugins" version = "0.4.2" description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = 
"sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, ] [package.dependencies] markdown-it-py = ">=1.0.0,<4.0.0" [package.extras] code-style = ["pre-commit"] rtd = ["myst-parser", "sphinx-book-theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" groups = ["main", "dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] [[package]] name = "mypy" version = "1.15.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, {file = 
"mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] mypy_extensions = ">=1.0.0" tomli 
= {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "myst-parser" version = "3.0.1" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, ] [package.dependencies] docutils = ">=0.18,<0.22" jinja2 = "*" markdown-it-py = ">=3.0,<4.0" mdit-py-plugins = ">=0.4,<1.0" pyyaml = "*" sphinx = ">=6,<8" [package.extras] code-style = ["pre-commit (>=3.0,<4.0)"] linkify = ["linkify-it-py (>=2.0,<3.0)"] rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] [[package]] name = 
"packaging" version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] type = ["mypy (>=1.11.2)"] [[package]] name = "pygments" version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] six = ">=1.5" [[package]] name = "pytoolconfig" version = "1.3.1" description = "Python tool configuration" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "pytoolconfig-1.3.1-py3-none-any.whl", hash = "sha256:5d8cea8ae1996938ec3eaf44567bbc5ef1bc900742190c439a44a704d6e1b62b"}, {file = "pytoolconfig-1.3.1.tar.gz", hash = "sha256:51e6bd1a6f108238ae6aab6a65e5eed5e75d456be1c2bf29b04e5c1e7d7adbae"}, ] [package.dependencies] packaging = ">=23.2" platformdirs = {version = ">=3.11.0", optional = true, markers = "extra == \"global\""} tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["sphinx (>=7.1.2)", "tabulate (>=0.9.0)"] gendocs = ["pytoolconfig[doc]", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.25.2)", "sphinx-rtd-theme (>=2.0.0)"] global = ["platformdirs (>=3.11.0)"] validation = ["pydantic (>=2.5.3)"] [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = 
"PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, {file = 
"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" groups = ["main", "dev"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rope" version = "1.13.0" description = "a python refactoring library..." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "rope-1.13.0-py3-none-any.whl", hash = "sha256:b435a0c0971244fdcd8741676a9fae697ae614c20cc36003678a7782f25c0d6c"}, {file = "rope-1.13.0.tar.gz", hash = "sha256:51437d2decc8806cd5e9dd1fd9c1306a6d9075ecaf78d191af85fc1dfface880"}, ] [package.dependencies] pytoolconfig = {version = ">=1.2.2", extras = ["global"]} [package.extras] dev = ["build (>=0.7.0)", "pre-commit (>=2.20.0)", "pytest (>=7.0.1)", "pytest-cov (>=4.1.0)", "pytest-timeout (>=2.1.0)"] doc = ["pytoolconfig[doc]", "sphinx (>=4.5.0)", "sphinx-autodoc-typehints (>=1.18.1)", "sphinx-rtd-theme (>=1.0.0)"] release = ["pip-tools (>=6.12.1)", "toml (>=0.10.2)", "twine (>=4.0.2)"] [[package]] name = "ruff" version = "0.9.9" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367"}, {file = "ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7"}, {file = "ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d"}, {file = "ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a"}, {file = "ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe"}, {file = "ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c"}, {file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be"}, {file = 
"ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590"}, {file = "ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb"}, {file = "ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0"}, {file = "ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17"}, {file = "ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1"}, {file = "ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57"}, {file = "ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e"}, {file = "ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1"}, {file = "ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1"}, {file = "ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf"}, {file = "ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933"}, ] [[package]] name = "semver" version = "3.0.4" description = "Python helper for Semantic Versioning (https://semver.org)" optional = false python-versions = ">=3.7" groups = ["main"] files = [ {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, ] [[package]] name = "shtab" version = 
"1.7.1" description = "Automagic shell tab completion for Python CLI applications" optional = false python-versions = ">=3.7" groups = ["main", "dev"] files = [ {file = "shtab-1.7.1-py3-none-any.whl", hash = "sha256:32d3d2ff9022d4c77a62492b6ec875527883891e33c6b479ba4d41a51e259983"}, {file = "shtab-1.7.1.tar.gz", hash = "sha256:4e4bcb02eeb82ec45920a5d0add92eac9c9b63b2804c9196c1f1fdc2d039243c"}, ] [package.extras] dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout"] [[package]] name = "six" version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" groups = ["main", "dev"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." optional = false python-versions = "*" groups = ["dev"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] [[package]] name = "soupsieve" version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" groups = ["dev"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] [[package]] name = "sphinx" version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, ] [package.dependencies] alabaster = ">=0.7.14,<0.8.0" babel = ">=2.13" colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} docutils = ">=0.20,<0.22" imagesize = ">=1.3" importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} Jinja2 = ">=3.1" packaging = ">=23.0" Pygments = ">=2.17" requests = ">=2.30.0" snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = ">=1.1.9" tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinx-autobuild" version = "2024.10.3" description = "Rebuild Sphinx documentation on changes, with hot reloading in the browser." 
optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "sphinx_autobuild-2024.10.3-py3-none-any.whl", hash = "sha256:158e16c36f9d633e613c9aaf81c19b0fc458ca78b112533b20dafcda430d60fa"}, {file = "sphinx_autobuild-2024.10.3.tar.gz", hash = "sha256:248150f8f333e825107b6d4b86113ab28fa51750e5f9ae63b59dc339be951fb1"}, ] [package.dependencies] colorama = ">=0.4.6" sphinx = "*" starlette = ">=0.35" uvicorn = ">=0.25" watchfiles = ">=0.20" websockets = ">=11" [package.extras] test = ["httpx", "pytest (>=6)"] [[package]] name = "sphinx-basic-ng" version = "1.0.0b2" description = "A modern skeleton for Sphinx themes." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, ] [package.dependencies] sphinx = ">=4.0" [package.extras] docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] [[package]] name = "sphinxcontrib-applehelp" version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = 
"sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" groups = ["dev"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] [package.extras] test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = 
"sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, ] [package.extras] lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "starlette" version = "0.46.0" description = "The little ASGI library that shines." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "starlette-0.46.0-py3-none-any.whl", hash = "sha256:913f0798bd90ba90a9156383bcf1350a17d6259451d0d8ee27fc0cf2db609038"}, {file = "starlette-0.46.0.tar.gz", hash = "sha256:b359e4567456b28d473d0193f34c0de0ed49710d75ef183a74a5ce0499324f50"}, ] [package.dependencies] anyio = ">=3.6.2,<5" typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [package.extras] full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "tomli" version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["dev"] markers = "python_version < \"3.11\"" files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, {file = 
"tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, {file = 
"tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tomlkit" version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] markers = {main = "python_version < \"3.13\""} [[package]] name = "urllib3" version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uvicorn" version = "0.34.0" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, ] [package.dependencies] click = ">=7.0" h11 = ">=0.8" typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} [package.extras] standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] [[package]] name = "watchfiles" version = "1.0.4" description = "Simple, modern and high performance file watching and code reload in python." 
optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"}, {file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"}, {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47eb32ef8c729dbc4f4273baece89398a4d4b5d21a1493efea77a17059f4df8a"}, {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076f293100db3b0b634514aa0d294b941daa85fc777f9c698adb1009e5aca0b1"}, {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eacd91daeb5158c598fe22d7ce66d60878b6294a86477a4715154990394c9b3"}, {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13c2ce7b72026cfbca120d652f02c7750f33b4c9395d79c9790b27f014c8a5a2"}, {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90192cdc15ab7254caa7765a98132a5a41471cf739513cc9bcf7d2ffcc0ec7b2"}, {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278aaa395f405972e9f523bd786ed59dfb61e4b827856be46a42130605fd0899"}, {file = "watchfiles-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a462490e75e466edbb9fc4cd679b62187153b3ba804868452ef0577ec958f5ff"}, {file = "watchfiles-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8d0d0630930f5cd5af929040e0778cf676a46775753e442a3f60511f2409f48f"}, {file = "watchfiles-1.0.4-cp310-cp310-win32.whl", hash = "sha256:cc27a65069bcabac4552f34fd2dce923ce3fcde0721a16e4fb1b466d63ec831f"}, {file = "watchfiles-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:8b1f135238e75d075359cf506b27bf3f4ca12029c47d3e769d8593a2024ce161"}, {file = 
"watchfiles-1.0.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2a9f93f8439639dc244c4d2902abe35b0279102bca7bbcf119af964f51d53c19"}, {file = "watchfiles-1.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eea33ad8c418847dd296e61eb683cae1c63329b6d854aefcd412e12d94ee235"}, {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31f1a379c9dcbb3f09cf6be1b7e83b67c0e9faabed0471556d9438a4a4e14202"}, {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab594e75644421ae0a2484554832ca5895f8cab5ab62de30a1a57db460ce06c6"}, {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc2eb5d14a8e0d5df7b36288979176fbb39672d45184fc4b1c004d7c3ce29317"}, {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f68d8e9d5a321163ddacebe97091000955a1b74cd43724e346056030b0bacee"}, {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9ce064e81fe79faa925ff03b9f4c1a98b0bbb4a1b8c1b015afa93030cb21a49"}, {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b77d5622ac5cc91d21ae9c2b284b5d5c51085a0bdb7b518dba263d0af006132c"}, {file = "watchfiles-1.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1941b4e39de9b38b868a69b911df5e89dc43767feeda667b40ae032522b9b5f1"}, {file = "watchfiles-1.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f8c4998506241dedf59613082d1c18b836e26ef2a4caecad0ec41e2a15e4226"}, {file = "watchfiles-1.0.4-cp311-cp311-win32.whl", hash = "sha256:4ebbeca9360c830766b9f0df3640b791be569d988f4be6c06d6fae41f187f105"}, {file = "watchfiles-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:05d341c71f3d7098920f8551d4df47f7b57ac5b8dad56558064c3431bdfc0b74"}, {file = "watchfiles-1.0.4-cp311-cp311-win_arm64.whl", hash = 
"sha256:32b026a6ab64245b584acf4931fe21842374da82372d5c039cba6bf99ef722f3"}, {file = "watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2"}, {file = "watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9"}, {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712"}, {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12"}, {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844"}, {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733"}, {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af"}, {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a"}, {file = "watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff"}, {file = "watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e"}, {file = "watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94"}, {file = "watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c"}, {file = 
"watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90"}, {file = "watchfiles-1.0.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9"}, {file = "watchfiles-1.0.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60"}, {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407"}, {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d"}, {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d"}, {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b"}, {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590"}, {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902"}, {file = "watchfiles-1.0.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1"}, {file = "watchfiles-1.0.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303"}, {file = "watchfiles-1.0.4-cp313-cp313-win32.whl", hash = "sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80"}, {file = "watchfiles-1.0.4-cp313-cp313-win_amd64.whl", hash = 
"sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc"}, {file = "watchfiles-1.0.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:d3452c1ec703aa1c61e15dfe9d482543e4145e7c45a6b8566978fbb044265a21"}, {file = "watchfiles-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7b75fee5a16826cf5c46fe1c63116e4a156924d668c38b013e6276f2582230f0"}, {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e997802d78cdb02623b5941830ab06f8860038faf344f0d288d325cc9c5d2ff"}, {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0611d244ce94d83f5b9aff441ad196c6e21b55f77f3c47608dcf651efe54c4a"}, {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9745a4210b59e218ce64c91deb599ae8775c8a9da4e95fb2ee6fe745fc87d01a"}, {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4810ea2ae622add560f4aa50c92fef975e475f7ac4900ce5ff5547b2434642d8"}, {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:740d103cd01458f22462dedeb5a3382b7f2c57d07ff033fbc9465919e5e1d0f3"}, {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdbd912a61543a36aef85e34f212e5d2486e7c53ebfdb70d1e0b060cc50dd0bf"}, {file = "watchfiles-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0bc80d91ddaf95f70258cf78c471246846c1986bcc5fd33ccc4a1a67fcb40f9a"}, {file = "watchfiles-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab0311bb2ffcd9f74b6c9de2dda1612c13c84b996d032cd74799adb656af4e8b"}, {file = "watchfiles-1.0.4-cp39-cp39-win32.whl", hash = "sha256:02a526ee5b5a09e8168314c905fc545c9bc46509896ed282aeb5a8ba9bd6ca27"}, {file = "watchfiles-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:a5ae5706058b27c74bac987d615105da17724172d5aaacc6c362a40599b6de43"}, {file = "watchfiles-1.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:cdcc92daeae268de1acf5b7befcd6cfffd9a047098199056c72e4623f531de18"}, {file = "watchfiles-1.0.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8d3d9203705b5797f0af7e7e5baa17c8588030aaadb7f6a86107b7247303817"}, {file = "watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdef5a1be32d0b07dcea3318a0be95d42c98ece24177820226b56276e06b63b0"}, {file = "watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:342622287b5604ddf0ed2d085f3a589099c9ae8b7331df3ae9845571586c4f3d"}, {file = "watchfiles-1.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9fe37a2de80aa785d340f2980276b17ef697ab8db6019b07ee4fd28a8359d2f3"}, {file = "watchfiles-1.0.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d1ef56b56ed7e8f312c934436dea93bfa3e7368adfcf3df4c0da6d4de959a1e"}, {file = "watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b42cac65beae3a362629950c444077d1b44f1790ea2772beaea95451c086bb"}, {file = "watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e0227b8ed9074c6172cf55d85b5670199c99ab11fd27d2c473aa30aec67ee42"}, {file = "watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205"}, ] [package.dependencies] anyio = ">=3.0.0" [[package]] name = "websockets" version = "15.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ {file = "websockets-15.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5e6ee18a53dd5743e6155b8ff7e8e477c25b29b440f87f65be8165275c87fef0"}, {file = "websockets-15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ee06405ea2e67366a661ed313e14cf2a86e84142a3462852eb96348f7219cee3"}, {file = "websockets-15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8711682a629bbcaf492f5e0af72d378e976ea1d127a2d47584fa1c2c080b436b"}, {file = "websockets-15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94c4a9b01eede952442c088d415861b0cf2053cbd696b863f6d5022d4e4e2453"}, {file = "websockets-15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45535fead66e873f411c1d3cf0d3e175e66f4dd83c4f59d707d5b3e4c56541c4"}, {file = "websockets-15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e389efe46ccb25a1f93d08c7a74e8123a2517f7b7458f043bd7529d1a63ffeb"}, {file = "websockets-15.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:67a04754d121ea5ca39ddedc3f77071651fb5b0bc6b973c71c515415b44ed9c5"}, {file = "websockets-15.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bd66b4865c8b853b8cca7379afb692fc7f52cf898786537dfb5e5e2d64f0a47f"}, {file = "websockets-15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a4cc73a6ae0a6751b76e69cece9d0311f054da9b22df6a12f2c53111735657c8"}, {file = "websockets-15.0-cp310-cp310-win32.whl", hash = "sha256:89da58e4005e153b03fe8b8794330e3f6a9774ee9e1c3bd5bc52eb098c3b0c4f"}, {file = "websockets-15.0-cp310-cp310-win_amd64.whl", hash = "sha256:4ff380aabd7a74a42a760ee76c68826a8f417ceb6ea415bd574a035a111fd133"}, {file = "websockets-15.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dd24c4d256558429aeeb8d6c24ebad4e982ac52c50bc3670ae8646c181263965"}, {file = "websockets-15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f83eca8cbfd168e424dfa3b3b5c955d6c281e8fc09feb9d870886ff8d03683c7"}, {file = "websockets-15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4095a1f2093002c2208becf6f9a178b336b7572512ee0a1179731acb7788e8ad"}, {file = "websockets-15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb915101dfbf318486364ce85662bb7b020840f68138014972c08331458d41f3"}, {file = 
"websockets-15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:45d464622314973d78f364689d5dbb9144e559f93dca11b11af3f2480b5034e1"}, {file = "websockets-15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace960769d60037ca9625b4c578a6f28a14301bd2a1ff13bb00e824ac9f73e55"}, {file = "websockets-15.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7cd4b1015d2f60dfe539ee6c95bc968d5d5fad92ab01bb5501a77393da4f596"}, {file = "websockets-15.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4f7290295794b5dec470867c7baa4a14182b9732603fd0caf2a5bf1dc3ccabf3"}, {file = "websockets-15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3abd670ca7ce230d5a624fd3d55e055215d8d9b723adee0a348352f5d8d12ff4"}, {file = "websockets-15.0-cp311-cp311-win32.whl", hash = "sha256:110a847085246ab8d4d119632145224d6b49e406c64f1bbeed45c6f05097b680"}, {file = "websockets-15.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7bbbe2cd6ed80aceef2a14e9f1c1b61683194c216472ed5ff33b700e784e37"}, {file = "websockets-15.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cccc18077acd34c8072578394ec79563664b1c205f7a86a62e94fafc7b59001f"}, {file = "websockets-15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d4c22992e24f12de340ca5f824121a5b3e1a37ad4360b4e1aaf15e9d1c42582d"}, {file = "websockets-15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1206432cc6c644f6fc03374b264c5ff805d980311563202ed7fef91a38906276"}, {file = "websockets-15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3cc75ef3e17490042c47e0523aee1bcc4eacd2482796107fd59dd1100a44bc"}, {file = "websockets-15.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b89504227a5311610e4be16071465885a0a3d6b0e82e305ef46d9b064ce5fb72"}, {file = 
"websockets-15.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56e3efe356416bc67a8e093607315951d76910f03d2b3ad49c4ade9207bf710d"}, {file = "websockets-15.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f2205cdb444a42a7919690238fb5979a05439b9dbb73dd47c863d39640d85ab"}, {file = "websockets-15.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aea01f40995fa0945c020228ab919b8dfc93fc8a9f2d3d705ab5b793f32d9e99"}, {file = "websockets-15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9f8e33747b1332db11cf7fcf4a9512bef9748cb5eb4d3f7fbc8c30d75dc6ffc"}, {file = "websockets-15.0-cp312-cp312-win32.whl", hash = "sha256:32e02a2d83f4954aa8c17e03fe8ec6962432c39aca4be7e8ee346b05a3476904"}, {file = "websockets-15.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc02b159b65c05f2ed9ec176b715b66918a674bd4daed48a9a7a590dd4be1aa"}, {file = "websockets-15.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d2244d8ab24374bed366f9ff206e2619345f9cd7fe79aad5225f53faac28b6b1"}, {file = "websockets-15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3a302241fbe825a3e4fe07666a2ab513edfdc6d43ce24b79691b45115273b5e7"}, {file = "websockets-15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10552fed076757a70ba2c18edcbc601c7637b30cdfe8c24b65171e824c7d6081"}, {file = "websockets-15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c53f97032b87a406044a1c33d1e9290cc38b117a8062e8a8b285175d7e2f99c9"}, {file = "websockets-15.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1caf951110ca757b8ad9c4974f5cac7b8413004d2f29707e4d03a65d54cedf2b"}, {file = "websockets-15.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bf1ab71f9f23b0a1d52ec1682a3907e0c208c12fef9c3e99d2b80166b17905f"}, {file = "websockets-15.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:bfcd3acc1a81f106abac6afd42327d2cf1e77ec905ae11dc1d9142a006a496b6"}, {file = "websockets-15.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c8c5c8e1bac05ef3c23722e591ef4f688f528235e2480f157a9cfe0a19081375"}, {file = "websockets-15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:86bfb52a9cfbcc09aba2b71388b0a20ea5c52b6517c0b2e316222435a8cdab72"}, {file = "websockets-15.0-cp313-cp313-win32.whl", hash = "sha256:26ba70fed190708551c19a360f9d7eca8e8c0f615d19a574292b7229e0ae324c"}, {file = "websockets-15.0-cp313-cp313-win_amd64.whl", hash = "sha256:ae721bcc8e69846af00b7a77a220614d9b2ec57d25017a6bbde3a99473e41ce8"}, {file = "websockets-15.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c348abc5924caa02a62896300e32ea80a81521f91d6db2e853e6b1994017c9f6"}, {file = "websockets-15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5294fcb410ed0a45d5d1cdedc4e51a60aab5b2b3193999028ea94afc2f554b05"}, {file = "websockets-15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c24ba103ecf45861e2e1f933d40b2d93f5d52d8228870c3e7bf1299cd1cb8ff1"}, {file = "websockets-15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc8821a03bcfb36e4e4705316f6b66af28450357af8a575dc8f4b09bf02a3dee"}, {file = "websockets-15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc5ae23ada6515f31604f700009e2df90b091b67d463a8401c1d8a37f76c1d7"}, {file = "websockets-15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ac67b542505186b3bbdaffbc303292e1ee9c8729e5d5df243c1f20f4bb9057e"}, {file = "websockets-15.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c86dc2068f1c5ca2065aca34f257bbf4f78caf566eb230f692ad347da191f0a1"}, {file = "websockets-15.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:30cff3ef329682b6182c01c568f551481774c476722020b8f7d0daacbed07a17"}, {file = "websockets-15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:98dcf978d4c6048965d1762abd534c9d53bae981a035bfe486690ba11f49bbbb"}, {file = "websockets-15.0-cp39-cp39-win32.whl", hash = "sha256:37d66646f929ae7c22c79bc73ec4074d6db45e6384500ee3e0d476daf55482a9"}, {file = "websockets-15.0-cp39-cp39-win_amd64.whl", hash = "sha256:24d5333a9b2343330f0f4eb88546e2c32a7f5c280f8dd7d3cc079beb0901781b"}, {file = "websockets-15.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b499caef4bca9cbd0bd23cd3386f5113ee7378094a3cb613a2fa543260fe9506"}, {file = "websockets-15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:17f2854c6bd9ee008c4b270f7010fe2da6c16eac5724a175e75010aacd905b31"}, {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89f72524033abbfde880ad338fd3c2c16e31ae232323ebdfbc745cbb1b3dcc03"}, {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1657a9eecb29d7838e3b415458cc494e6d1b194f7ac73a34aa55c6fb6c72d1f3"}, {file = "websockets-15.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e413352a921f5ad5d66f9e2869b977e88d5103fc528b6deb8423028a2befd842"}, {file = "websockets-15.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8561c48b0090993e3b2a54db480cab1d23eb2c5735067213bb90f402806339f5"}, {file = "websockets-15.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:190bc6ef8690cd88232a038d1b15714c258f79653abad62f7048249b09438af3"}, {file = "websockets-15.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:327adab7671f3726b0ba69be9e865bba23b37a605b585e65895c428f6e47e766"}, {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd8ef197c87afe0a9009f7a28b5dc613bfc585d329f80b7af404e766aa9e8c7"}, {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:789c43bf4a10cd067c24c321238e800b8b2716c863ddb2294d2fed886fa5a689"}, {file = "websockets-15.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7394c0b7d460569c9285fa089a429f58465db930012566c03046f9e3ab0ed181"}, {file = "websockets-15.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ea4f210422b912ebe58ef0ad33088bc8e5c5ff9655a8822500690abc3b1232d"}, {file = "websockets-15.0-py3-none-any.whl", hash = "sha256:51ffd53c53c4442415b613497a34ba0aa7b99ac07f1e4a62db5dcd640ae6c3c3"}, {file = "websockets-15.0.tar.gz", hash = "sha256:ca36151289a15b39d8d683fd8b7abbe26fc50be311066c5f8dcf3cb8cee107ab"}, ] [[package]] name = "zipp" version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" groups = ["dev"] markers = "python_version < \"3.10\"" files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" content-hash = "b9e51a9643b8ab7ddfb911e77e9382976e0d8d52eaf6fae3e2c2a62f9952bc32" pontos-25.3.2/pontos/000077500000000000000000000000001476255566300144725ustar00rootroot00000000000000pontos-25.3.2/pontos/__init__.py000066400000000000000000000002171476255566300166030ustar00rootroot00000000000000# 
SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .pontos import main __all__ = ["main"] pontos-25.3.2/pontos/changelog/000077500000000000000000000000001476255566300164215ustar00rootroot00000000000000pontos-25.3.2/pontos/changelog/__init__.py000066400000000000000000000005471476255566300205400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .conventional_commits import ChangelogBuilder from .errors import ChangelogBuilderError, ChangelogError from .main import main __all__ = ( "ChangelogError", "ChangelogBuilderError", "ChangelogBuilder", "ConventionalCommits", "main", ) pontos-25.3.2/pontos/changelog/_parser.py000066400000000000000000000053651476255566300204370ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from argparse import ArgumentParser, Namespace from pathlib import Path from typing import Optional, Sequence import shtab from pontos.version.schemes import ( VERSIONING_SCHEMES, VersioningScheme, versioning_scheme_argument_type, ) def parse_args(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser( description="Conventional commits utility. Create a changelog markdown " " text from conventional commits between the current and next release.", prog="pontos-changelog", ) shtab.add_argument_to(parser) parser.add_argument( "--config", "-C", type=Path, help="Optional. Conventional commits config file (toml), including " "conventions. If not provided defaults are used.", ).complete = shtab.FILE # type: ignore[attr-defined] parser.add_argument( "--repository", required=True, help="The github repository (owner/name). Used for building the links " "to the repository.", ) parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. 
" "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) parser.add_argument( "--current-version", help="Version to start looking for changes. All commits since this " "releases are take into account for creating the changelog text.", ) parser.add_argument( "--next-version", "--release-version", dest="next_version", help="The planned release version", ) parser.add_argument( "--git-tag-prefix", default="v", help="Prefix for git tag versions. Used to determine existing " "releases. Default: %(default)s", ) parser.add_argument( "--output", "-o", type=Path, help="Write changelog to this file.", ).complete = shtab.FILE # type: ignore[attr-defined] parser.add_argument( "--quiet", "-q", action="store_true", help="Don't print messages to the terminal", ) parsed_args = parser.parse_args(args=args) scheme: VersioningScheme = parsed_args.versioning_scheme current_version = getattr(parsed_args, "current_version", None) if current_version: parsed_args.current_version = scheme.parse_version(current_version) next_version = getattr(parsed_args, "next_version", None) if next_version: parsed_args.next_version = scheme.parse_version(next_version) return parsed_args pontos-25.3.2/pontos/changelog/conventional_commits.py000066400000000000000000000262451476255566300232360ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import re from collections import defaultdict from datetime import date from pathlib import Path from typing import NamedTuple, Optional, TypedDict, Union import tomlkit from pontos.changelog.errors import ChangelogBuilderError from pontos.git import Git from pontos.typing import SupportsStr ADDRESS = "https://github.com/" DEFAULT_CHANGELOG_CONFIG = """commit_types = [ { message = "^add", group = "Added"}, { message = "^remove", group = "Removed"}, { message = "^change", group = "Changed"}, { message = "^fix", group = "Bug Fixes"}, { message = "^deps", group = 
"Dependencies"}, ] """ class CommitType(TypedDict): message: str group: str class CommitLogEntry(NamedTuple): commit_id: str message: str class ConventionalCommits: """ Extracts conventional commits from the git log Example: Collect the conventional commits between the tags "v1.2.3" and "v2.0.0" using the default config settings. Afterwards get the list of commits for the "Added" category. .. code-block:: python from pontos.changelog import ConventionalCommits collector = ConventionalCommits() commits = collector.get_commits( from_ref="v1.2.3", to_ref="v2.0.0", ) added = commits.get("Added") """ def __init__( self, config: Optional[Path] = None, ) -> None: """ Create a new ConventionalCommits instance for collecting conventional commits from a git log. Args: config: Optional TOML config for conventional commit parsing settings. """ if config: if not config.exists(): raise ChangelogBuilderError( f"Changelog Config file '{config.absolute()}' does not " "exist." ) self._config = tomlkit.parse(config.read_text(encoding="utf-8")) else: self._config = tomlkit.parse(DEFAULT_CHANGELOG_CONFIG) def get_commits( self, from_ref: Optional[SupportsStr] = None, to_ref: SupportsStr = "HEAD", ) -> dict[str, list[CommitLogEntry]]: """ Get all commits by conventional commit type between a range of git references. Args: from_ref: Git commit ID or reference where to start looking for conventional commits. If None, to_ref is ignored and all conventional commits are returned. to_ref: Git commit ID or reference where to stop looking for conventional commits. By default HEAD is used. Returns: A dict containing the grouped log entries """ commit_list = self._get_git_log(from_ref, to_ref) return self._sort_commits(commit_list) def commit_types(self) -> list[CommitType]: return self._config.get("commit_types", []) def _get_git_log( self, from_ref: Optional[SupportsStr], to_ref: SupportsStr = "HEAD" ) -> list[str]: """Getting the git log for the a range of commits. 
Requires the fitting branch to be checked out if to_ref is not set. Args: from_ref: Git commit ID or reference of the first log entry. If None, to_ref is ignored and all log entries of the current checked out branch are returned. to_ref: Git commit ID or reference where to stop considering the log entries. By default HEAD is used which points to the last commit of the checked out branch. Returns: A list of `git log` entries """ git = Git() if not from_ref: return git.log(oneline=True) return git.log( f"{from_ref}..{to_ref}", oneline=True, ) def _sort_commits( self, commits: list[str] ) -> dict[str, list[CommitLogEntry]]: """Sort the commits by commit type and group them in a dict ``` { 'Added:': [ ('commit 1', 'message 1'), ('commit 2', 'message 2'), '...', ], 'Fixed:': [ ... ], } ``` Returns The dict containing the commit messages """ expressions = [ ( commit_type["group"], re.compile(rf'{commit_type["message"]}\s?[:|-]', flags=re.I), ) for commit_type in self.commit_types() ] commit_dict = defaultdict(list) if commits and len(commits) > 0: for commit in commits: commit_id, message = commit.split(" ", maxsplit=1) for group, reg in expressions: match = reg.match(message) if match: # remove the commit tag from commit message cleaned_msg = message.replace( match.group(0), "" ).strip() commit_dict[group].append( CommitLogEntry( commit_id=commit_id, message=cleaned_msg ) ) return commit_dict class ChangelogBuilder: """ Creates Changelog from conventional commits using the git log from the latest version. Example: Create a changelog as a string from the changes between git tags "v1.2.3" and "v2.0.0" using the default config settings. .. 
code-block:: python from pontos.changelog import ChangelogBuilder builder = ChangelogBuilder(repository="my-org/my-project) changelog = builder.create_changelog( last_version="1.2.3", next_version="2.0.0", ) """ def __init__( self, *, repository: str, git_tag_prefix: Optional[str] = "v", config: Optional[Path] = None, ) -> None: """ Create a new ChangelogBuilder instance. Args: repository: GitHub repository (owner/name) to create the changelog for. For example: "octocat/Hello-World" git_tag_prefix: Git tag prefix to use when checking for git tags. Default is "v". config: TOML config for conventional commit parsing settings """ self._repository = repository self._git_tag_prefix = git_tag_prefix self._conventional_commits = ConventionalCommits(config) def create_changelog( self, *, last_version: Optional[SupportsStr] = None, next_version: Optional[SupportsStr] = None, ) -> str: """ Create a changelog Args: last_version: Version of the last release. If None it is considered as the first release. next_version: Version of the to be created release the changelog corresponds to. If None a changelog for an unrelease version will be created. Returns: The created changelog content. """ commit_dict = self._conventional_commits.get_commits( f"{self._git_tag_prefix}{last_version}" if last_version else None ) return self._build_changelog(last_version, next_version, commit_dict) def create_changelog_file( self, output: Union[str, Path], *, last_version: Optional[SupportsStr] = None, next_version: Optional[SupportsStr] = None, ) -> None: """ Create a changelog and write the changelog to a file Args: output: A file path where to store the changelog last_version: Version of the last release. If None it is considered as the first release. next_version: Version of the to be created release the changelog corresponds to. If None a changelog for an unrelease version will be created. 
""" changelog = self.create_changelog( last_version=last_version, next_version=next_version ) self._write_changelog_file(changelog, output) def _get_first_commit(self) -> str: """ Git the first commit ID for the current branch """ git = Git() return git.rev_list("HEAD", max_parents=0, abbrev_commit=True)[0] def _build_changelog( self, last_version: Optional[SupportsStr], next_version: Optional[SupportsStr], commit_dict: dict[str, list[CommitLogEntry]], ) -> str: """ Building the changelog from the passed commit information. Args: commit_dict: dict containing sorted commits Returns: The changelog content """ # changelog header changelog = [] if next_version: changelog.append( f"## [{next_version}] - {date.today().isoformat()}" ) else: changelog.append("## [Unreleased]") # changelog entries for commit_type in self._conventional_commits.commit_types(): if commit_type["group"] in commit_dict.keys(): changelog.append(f"\n## {commit_type['group']}") for log_entry in commit_dict[commit_type["group"]]: commit_id, commit_message = log_entry commit_link = ( f"{ADDRESS}{self._repository}/" f"commit/{commit_id}" ) msg = f"{commit_message} [{commit_id}]({commit_link})" changelog.append(f"* {msg}") # comparison line (footer) pre = "\n[Unreleased]: " compare_link = f"{ADDRESS}{self._repository}/compare/" if next_version and last_version: pre = f"\n[{next_version}]: " diff = ( f"{self._git_tag_prefix}{last_version}..." 
f"{self._git_tag_prefix}{next_version}" ) elif next_version: first_commit = self._get_first_commit() pre = f"\n[{next_version}]: " diff = f"{first_commit}...{self._git_tag_prefix}{next_version}" elif last_version: # unreleased version diff = f"{self._git_tag_prefix}{last_version}...HEAD" else: # unreleased version first_commit = self._get_first_commit() diff = f"{first_commit}...HEAD" changelog.append(f"{pre}{compare_link}{diff}") return "\n".join(changelog) def _write_changelog_file( self, changelog: str, output: Union[str, Path] ) -> None: """ Write changelog to an output file Args: changelog: Changelog content to write to output file output: File name to write changelog into """ changelog_file = Path(output) changelog_dir = changelog_file.parent changelog_dir.mkdir(parents=True, exist_ok=True) changelog_file.write_text(changelog, encoding="utf-8") pontos-25.3.2/pontos/changelog/errors.py000066400000000000000000000005131476255566300203060ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from pontos.errors import PontosError class ChangelogError(PontosError): """ Some error has occurred during changelog handling """ class ChangelogBuilderError(ChangelogError): """ An error while building a changelog """ pontos-25.3.2/pontos/changelog/main.py000066400000000000000000000032021476255566300177140ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import sys from typing import NoReturn, Optional, Sequence from pontos.changelog.conventional_commits import ChangelogBuilder from pontos.errors import PontosError from pontos.terminal.null import NullTerminal from pontos.terminal.rich import RichTerminal from pontos.version.helper import get_last_release_version from ._parser import parse_args def main(args: Optional[Sequence[str]] = None) -> NoReturn: parsed_args = parse_args(args) term = NullTerminal() if parsed_args.quiet else 
RichTerminal() if parsed_args.current_version: last_version = parsed_args.current_version else: last_version = get_last_release_version( parsed_args.versioning_scheme.parse_version, git_tag_prefix=parsed_args.git_tag_prefix, ) try: changelog_builder = ChangelogBuilder( config=parsed_args.config, repository=parsed_args.repository, ) if parsed_args.output: changelog_builder.create_changelog_file( parsed_args.output, last_version=last_version, next_version=parsed_args.next_version, ) else: changelog = changelog_builder.create_changelog( last_version=last_version, next_version=parsed_args.next_version, ) term.out(changelog) except KeyboardInterrupt: sys.exit(1) except PontosError as e: term.error(str(e)) sys.exit(2) sys.exit(0) if __name__ == "__main__": main() pontos-25.3.2/pontos/cpe/000077500000000000000000000000001476255566300152415ustar00rootroot00000000000000pontos-25.3.2/pontos/cpe/__init__.py000066400000000000000000000004721476255566300173550ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from ._cpe import ANY, CPE, NA, CPEParsingError, Part """ Module for parsing and handling Common Platform Enumeration (CPE) information """ __all__ = ( "ANY", "NA", "CPEParsingError", "Part", "CPE", ) pontos-25.3.2/pontos/cpe/_cpe.py000066400000000000000000000600201476255566300165170ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later import re import urllib.parse from dataclasses import dataclass from typing import Any, Optional from pontos.errors import PontosError from pontos.models import StrEnum __all__ = ( "ANY", "NA", "CPEParsingError", "Part", "CPE", ) ANY = "*" NA = "-" class CPEParsingError(PontosError): """ An error occurred while parsing a CPE """ class Part(StrEnum): """ Represents the possible values for a part CPE attribute """ APPLICATION = "a" OPERATING_SYSTEM = "o" HARDWARE_DEVICE = "h" WILDCARD = "*" # wildcard for 
requesting "all" possible cpe parts def is_uri_binding(cpe: str) -> bool: """ Returns True if cpe is a CPE v2.2 URI string """ return cpe.startswith("cpe:/") def is_formatted_string_binding(cpe: str) -> bool: """ Returns True if cpe is a CPE v2.3 formatted string """ return cpe.startswith("cpe:2.3:") def convert_double_backslash(value: str) -> str: """ Convert a double backslash into s single backslash """ return re.sub("\\\\(\\W)", lambda match: match.group(1), value) def _url_quote(value: str) -> str: """ Quote value according to the pct_encode function from the spec for uri format """ return urllib.parse.quote(value, safe="").lower() def _url_unquote(value: str) -> str: """ Un-quote value according to the the spec for uri format """ return urllib.parse.unquote(value) def pack_extended_attributes( edition: Optional[str], sw_edition: Optional[str], target_sw: Optional[str], target_hw: Optional[str], other: Optional[str], ) -> str: """ Pack the extended attributes (v2.3) for an edition attribute (v2.2) """ if ( (not sw_edition or sw_edition == ANY) and (not target_sw or target_sw == ANY) and (not target_hw or target_hw == ANY) and (not other or other == ANY) ): if not edition or edition == ANY: return "" else: return edition else: return ( f"~{'' if not edition or edition == ANY else edition}" f"~{'' if not sw_edition or sw_edition == ANY else sw_edition}" f"~{'' if not target_sw or target_sw == ANY else target_sw}" f"~{'' if not target_hw or target_hw == ANY else target_hw}" f"~{'' if not other or other == ANY else other}" ) def unpack_edition(edition: str) -> dict[str, Optional[str]]: """ Unpack the edition attribute of v2.2 into extended attributes of v2.3 """ return dict( zip( [ "edition", "sw_edition", "target_sw", "target_hw", "other", ], [None if not a else a for a in edition.split("~")[1:-1]], ) ) def bind_value_for_formatted_string(value: Optional[str]) -> str: """ Convert an attribute value for formatted string representation """ if not value or value == 
ANY: return ANY value = value.replace("\\.", ".") value = value.replace("\\-", "-") value = value.replace("\\_", "_") return value def _add_quoting(value: str) -> str: """ Add quoting for parsing attributes from formatted string format to Well-Formed CPE Name Data Model (WFN) """ result: list[str] = [] index = 0 embedded = False while index < len(value): c = value[index] if c.isalnum() or c in ["_"]: # just add character result.append(c) index += 1 embedded = True continue if c == "\\": # keep escaped character result.append(value[index : index + 2]) index += 2 embedded = True continue if c == ANY: # An unquoted asterisk must appear at the beginning or # end of the string. if index == 0 or index == (len(value) - 1): result.append(c) index += 1 embedded = True continue else: raise CPEParsingError( "An unquoted asterisk must appear at the beginning or end " f"of '{value}'" ) if c == "?": # An unquoted question mark must appear at the beginning or # end of the string, or in a leading or trailing sequence if ( ( # ? is legal at the beginning or the end (index == 0) or (index == (len(value) - 1)) ) or ( # embedded is false, so must be preceded by ? not embedded and (value[index - 1 : index] == "?") ) or ( # embedded is true, so must be followed by ? 
embedded and (value[index + 1] == "?") ) ): result.append(c) index += 1 embedded = False continue else: raise CPEParsingError( "An unquoted question mark must appear at the beginning or " f"end, or in a leading or trailing sequence '{value}'" ) # all other characters must be quoted result.append(f"\\{c}") index += 1 embedded = True return "".join(result) def unbind_value_from_formatted_string(value: Optional[str]) -> Optional[str]: """ Convert a formatted string representation to an attribute value for WNF """ if value is None or value == ANY or value == NA: return value return _add_quoting(value) def _transform_for_uri(value: str) -> str: """ Applies transform to convert an attribute for an uri representation The following transformations are applied: - Pass alphanumeric characters thru untouched - Percent-encode quoted non-alphanumerics as needed - Unquoted special characters are mapped to their special forms """ transformed = "" index = 0 while index < len(value): c = value[index] # alpha numeric characters if c.isalnum() or c in ["_", "-", ".", "~"]: transformed += c index += 1 continue # percent encoding if c == "\\": index += 1 next = value[index] transformed += _url_quote(convert_double_backslash(next)) index += 1 continue # special forms if c == "?": transformed += "%01" elif c == "*": transformed += "%02" index += 1 return transformed def bind_value_for_uri(value: Optional[str]) -> str: """ Convert an attribute value for uri representation """ if not value or value == ANY: return "" if value == NA: return value try: return _transform_for_uri(value) except Exception as e: raise CPEParsingError(f"Can't bind '{value}' for URI") from e def unbind_value_uri(value: Optional[str]) -> Optional[str]: """ Convert an uri representation to an attribute value """ if value is None: return None if value == "": return ANY if value == NA: return NA result = "" index = 0 embedded = False while index < len(value): c = value[index] if c == "." 
or c == "-" or c == "~": result += f"\\{c}" index += 1 embedded = True continue if c != "%": result += c index += 1 embedded = True continue form = value[index : index + 3] if form == "%01": if ( index == 0 or (index == (len(value) - 3)) or (not embedded and (value[index - 3 : index] == "%01")) or ( embedded and (len(value) >= index + 6) and (value[index + 3 : index + 6]) == "%01" ) ): result += "?" else: raise CPEParsingError( "A percent-encoded question mark is not found at the " f"beginning or the end or embedded in sequence '{value}'" ) elif form == "%02": if (index == 0) or (index == (len(value) - 3)): result += "*" else: raise CPEParsingError( "Percent-encoded asterisk is no at the beginning " f"or the end of '{value}'" ) else: result += f"\\{_url_unquote(form)}" index += 3 embedded = True return result def unquote_attribute_value(value: Optional[str]) -> Optional[str]: """ Unquote a Well-Formed CPE Name Data Model (WFN) attribute value """ if not value or "\\" not in value: # do nothing return value index = 0 result = "" while index < len(value): c = value[index] if c == "\\": next_c = value[index + 1] if next_c in ["*", "?"]: # keep escaped asterisks and question marks result += f"{c}{next_c}" else: result += next_c index += 2 continue else: result += c index += 1 return result def split_cpe(cpe: str) -> list[str]: """ Split a CPE into its parts """ if "\\:" in cpe: # houston we have a problem # the cpe string contains an escaped colon (:) parts = [] index = 0 start_index = 0 stripped_cpe = cpe while index < len(cpe): if index > 0 and cpe[index] == ":" and cpe[index - 1] != "\\": part = cpe[start_index:index] parts.append(part) start_index = index + 1 stripped_cpe = cpe[start_index:] index += 1 if stripped_cpe: parts.append(stripped_cpe) else: parts = cpe.split(":") return parts @dataclass(frozen=True) class CPEWellFormed: """ Represents a Common Platform Enumeration (CPE) name using the Well-Formed CPE Name (WNF) Data Model. 
Attributes are quoted according to the WNF model. In most cases this class should not be used directly and the CPE class should be used instead. Attributes: part: Value should be "a" for application, "o" for operating system or "h" for hardware vendor: Person or organization that manufactured or created the product product: Identifies the most common and recognizable title or name of the product version: A vendor-specific alphanumeric string characterizing the particular release version of the product update: A vendor-specific alphanumeric string characterizing the particular update, service pack, or point release of the product edition: The edition attribute is considered deprecated in the 2.3 CPE specification, and it should be assigned the logical value ANY except where required for backward compatibility with version 2.2 of the CPE specification. This attribute is referred to as the “legacy edition†attribute language: Defines the language supported in the user interface of the product (as language tags defined by RFC5646) sw_edition: Characterizes how the product is tailored to a particular market or class of end users. Extended attribute introduced with version 2.3 of the CPE specification target_sw: Characterizes the software computing environment within which the product operates. Extended attribute introduced with version 2.3 of the CPE specification hardware_sw: Characterizes the instruction set architecture (e.g., x86) on which the product operates. Extended attribute introduced with version 2.3 of the CPE specification other: Captures any other general descriptive or identifying information which is vendor- or product-specific and which does not logically fit in any other attribute value. 
Extended attribute introduced with version 2.3 of the CPE specification """ part: Part vendor: Optional[str] = None product: Optional[str] = None version: Optional[str] = None update: Optional[str] = None edition: Optional[str] = None language: Optional[str] = None sw_edition: Optional[str] = None target_sw: Optional[str] = None target_hw: Optional[str] = None other: Optional[str] = None class CPE: """ Represents a Common Platform Enumeration (CPE) name Supports CPE specification 2.2 (uri) and 2.3 (formatted string) Attributes: part: Value should be "a" for application, "o" for operating system or "h" for hardware vendor: Person or organization that manufactured or created the product product: Identifies the most common and recognizable title or name of the product version: A vendor-specific alphanumeric string characterizing the particular release version of the product update: A vendor-specific alphanumeric string characterizing the particular update, service pack, or point release of the product edition: The edition attribute is considered deprecated in the 2.3 CPE specification, and it should be assigned the logical value ANY except where required for backward compatibility with version 2.2 of the CPE specification. This attribute is referred to as the “legacy edition†attribute language: Defines the language supported in the user interface of the product (as language tags defined by RFC5646) sw_edition: Characterizes how the product is tailored to a particular market or class of end users. Extended attribute introduced with version 2.3 of the CPE specification target_sw: Characterizes the software computing environment within which the product operates. Extended attribute introduced with version 2.3 of the CPE specification hardware_sw: Characterizes the instruction set architecture (e.g., x86) on which the product operates. 
Extended attribute introduced with version 2.3 of the CPE specification other: Captures any other general descriptive or identifying information which is vendor- or product-specific and which does not logically fit in any other attribute value. Extended attribute introduced with version 2.3 of the CPE specification cpe_string: The original parsed CPE string Example: .. code-block:: python from pontos.cpe import CPE cpe = CPE.from_string("cpe:2.3:o:google:android:13.0:*:*:*:*:*:*:*") print(cpe.vendor) # google print(cpe.product) # android print(cpe.version) # 13.0 print(cpe.as_uri_binding()) # cpe:/o:google:android:13.0 """ def __init__( self, *, cpe_string: Optional[str] = None, part: Part, vendor: Optional[str] = None, product: Optional[str] = None, version: Optional[str] = None, update: Optional[str] = None, edition: Optional[str] = None, language: Optional[str] = None, sw_edition: Optional[str] = None, target_sw: Optional[str] = None, target_hw: Optional[str] = None, other: Optional[str] = None, ) -> None: self.cpe_string = cpe_string self.__wnf__ = CPEWellFormed( part=part, vendor=vendor, product=product, version=version, update=update, edition=edition, language=language, sw_edition=sw_edition, target_sw=target_sw, target_hw=target_hw, other=other, ) self.part = part self.vendor = unquote_attribute_value(vendor) self.product = unquote_attribute_value(product) self.version = unquote_attribute_value(version) self.update = unquote_attribute_value(update) self.edition = unquote_attribute_value(edition) self.language = unquote_attribute_value(language) self.sw_edition = unquote_attribute_value(sw_edition) self.target_sw = unquote_attribute_value(target_sw) self.target_hw = unquote_attribute_value(target_hw) self.other = unquote_attribute_value(other) @staticmethod def from_string(cpe: str) -> "CPE": """ Create a new CPE from a string """ cleaned_cpe = cpe.strip().lower() parts = split_cpe(cleaned_cpe) if is_uri_binding(cleaned_cpe): values: dict[str, Optional[str]] 
= dict( zip( [ "vendor", "product", "version", "update", "edition", "language", ], parts[2:], ) ) for attribute in [ "vendor", "product", "version", "update", "language", ]: values[attribute] = unbind_value_uri(values.get(attribute)) edition = values.get("edition") if ( edition is None or edition == "" or edition == NA or edition[0] != "~" ): edition = unbind_value_uri(edition) else: values.update(unpack_edition(edition)) return CPE(cpe_string=cleaned_cpe, part=Part(parts[1][1]), **values) # type: ignore[arg-type] elif is_formatted_string_binding(cleaned_cpe): values = dict( zip( [ "vendor", "product", "version", "update", "edition", "language", "sw_edition", "target_sw", "target_hw", "other", ], [unbind_value_from_formatted_string(a) for a in parts[3:]], ) ) return CPE(cpe_string=cleaned_cpe, part=Part(parts[2]), **values) # type: ignore[arg-type] raise CPEParsingError( f"Invalid CPE string '{cpe}'. CPE does not start with " "'cpe:/' or 'cpe:2.3'" ) def has_extended_attribute(self) -> bool: """ Returns True if the CPE has an extended attribute set """ return bool( self.sw_edition or self.target_sw or self.target_hw or self.other ) def is_uri_binding(self) -> bool: """ Returns True if the CPE is parsed from a URI binding """ if self.cpe_string: return is_uri_binding(self.cpe_string) return not self.has_extended_attribute() def is_formatted_string_binding(self) -> bool: """ Returns True if the CPE is parsed from a formatted string binding """ if self.cpe_string: return is_formatted_string_binding(self.cpe_string) return self.has_extended_attribute() def as_uri_binding(self) -> str: """ Converts the CPE to an URI binding """ part = self.part.value vendor = bind_value_for_uri(self.__wnf__.vendor) product = bind_value_for_uri(self.__wnf__.product) version = bind_value_for_uri(self.__wnf__.version) update = bind_value_for_uri(self.__wnf__.update) language = bind_value_for_uri(self.__wnf__.language) edition = bind_value_for_uri(self.__wnf__.edition) sw_edition = 
bind_value_for_uri(self.__wnf__.sw_edition) target_sw = bind_value_for_uri(self.__wnf__.target_sw) target_hw = bind_value_for_uri(self.__wnf__.target_hw) other = bind_value_for_uri(self.__wnf__.other) edition = pack_extended_attributes( edition, sw_edition, target_sw, target_hw, other, ) uri = f"cpe:/{part}:{vendor}:{product}" if version or update or edition or language: uri = f"{uri}:{version}" if update or edition or language: uri = f"{uri}:{update}" if edition or language: uri = f"{uri}:{edition}" if language: uri = f"{uri}:{language}" return uri def as_formatted_string_binding(self) -> str: """ Converts the CPE to a formatted string binding """ part = self.part.value vendor = bind_value_for_formatted_string(self.__wnf__.vendor) product = bind_value_for_formatted_string(self.__wnf__.product) version = bind_value_for_formatted_string(self.__wnf__.version) update = bind_value_for_formatted_string(self.__wnf__.update) edition = bind_value_for_formatted_string(self.__wnf__.edition) language = bind_value_for_formatted_string(self.__wnf__.language) sw_edition = bind_value_for_formatted_string(self.__wnf__.sw_edition) target_sw = bind_value_for_formatted_string(self.__wnf__.target_sw) target_hw = bind_value_for_formatted_string(self.__wnf__.target_hw) other = bind_value_for_formatted_string(self.__wnf__.other) return ( f"cpe:2.3:{part}:{vendor}:{product}:{version}:{update}:" f"{edition}:{language}:{sw_edition}:{target_sw}:{target_hw}:{other}" ) def clone( self, **kwargs, ) -> "CPE": """ Clone a CPE and allow to override parts Example: .. 
code-block:: python from pontos.cpe import CPE, ANY android_13 = CPE.from_string( "cpe:2.3:o:google:android:13.0:*:*:*:*:*:*:*" ) all_android_versions = cpe.clone(version=ANY) """ args = { "part": self.__wnf__.part, "vendor": self.__wnf__.vendor, "product": self.__wnf__.product, "version": self.__wnf__.version, "update": self.__wnf__.update, "edition": self.__wnf__.edition, "language": self.__wnf__.language, "sw_edition": self.__wnf__.sw_edition, "target_sw": self.__wnf__.target_sw, "target_hw": self.__wnf__.target_hw, "other": self.__wnf__.other, "cpe_string": self.cpe_string, } args.update(**kwargs) return CPE(**args) # type: ignore[arg-type] def __str__(self) -> str: """ Returns the string representation (uri of formatted string) of the CPE """ if self.cpe_string: return self.cpe_string if not self.has_extended_attribute(): return self.as_uri_binding() return self.as_formatted_string_binding() def __repr__(self) -> str: return ( f"<{self.__class__.__name__} " f'part="{self.part}" ' f'vendor="{self.vendor}" ' f'product="{self.product}" ' f'version="{self.version}" ' f'update="{self.update}" ' f'edition="{self.edition}" ' f'language="{self.language}" ' f'sw_edition="{self.sw_edition}" ' f'target_sw="{self.target_sw}" ' f'target_hw="{self.target_hw}" ' f'other="{self.other}"' ">" ) def __hash__(self) -> int: return hash(str(self)) def __eq__(self, other: Any) -> bool: if not isinstance(other, CPE): return False return str(self) == str(other) pontos-25.3.2/pontos/enum.py000066400000000000000000000024771476255566300160220ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from argparse import ArgumentTypeError from enum import Enum from typing import Callable, Type, TypeVar, Union class StrEnum(str, Enum): # Should be replaced by enum.StrEnum when we require Python >= 3.11 """ An Enum that provides str like behavior """ def __str__(self) -> str: return self.value def enum_choice(enum: Type[Enum]) -> 
list[str]: """ Return a sequence of choices for argparse from an enum """ return [str(e) for e in enum] def to_choices(enum: Type[Enum]) -> str: """ Convert an enum to a comma separated string of choices. For example useful in help messages for argparse. """ return ", ".join([str(t) for t in enum]) T = TypeVar("T", bound=Enum) def enum_type(enum: Type[T]) -> Callable[[Union[str, T]], T]: """ Create a argparse type function for converting the string input into an Enum """ def convert(value: Union[str, T]) -> T: if isinstance(value, str): try: return enum(value) except ValueError: raise ArgumentTypeError( f"invalid value {value}. Expected one of {to_choices(enum)}." ) from None return value return convert pontos-25.3.2/pontos/errors.py000066400000000000000000000002731476255566300163620ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # class PontosError(Exception): """Base class for all errors originating in pontos""" pontos-25.3.2/pontos/git/000077500000000000000000000000001476255566300152555ustar00rootroot00000000000000pontos-25.3.2/pontos/git/__init__.py000066400000000000000000000007711476255566300173730ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from ._git import ( DEFAULT_TAG_PREFIX, DEFAULT_TAG_SORT_SUFFIX, ConfigScope, Git, GitError, MergeStrategy, ResetMode, TagSort, ) from ._status import Status, StatusEntry __all__ = ( "DEFAULT_TAG_SORT_SUFFIX", "DEFAULT_TAG_PREFIX", "ConfigScope", "Git", "GitError", "MergeStrategy", "ResetMode", "Status", "StatusEntry", "TagSort", ) pontos-25.3.2/pontos/git/_git.py000066400000000000000000000455511476255566300165630ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import subprocess from os import PathLike, fspath from pathlib import Path from typing import ( Collection, Iterable, Iterator, 
Optional, Sequence, Union, ) from pontos.enum import StrEnum from pontos.errors import PontosError from ._status import StatusEntry, parse_git_status DEFAULT_TAG_SORT_SUFFIX = [ "-alpha", "a", "-beta", "b", "-rc", "rc", ] DEFAULT_TAG_PREFIX = "v" class GitError(subprocess.CalledProcessError, PontosError): """ Error raised while executing a git command """ def __str__(self) -> str: cmd = " ".join(self.cmd) return ( f"Git command '{cmd}' returned " f"non-zero exit status {str(self.returncode)}" ) def exec_git( *args: str, ignore_errors: Optional[bool] = False, cwd: Optional[PathLike] = None, ) -> str: """ Internal module function to abstract calling git via subprocess. Most of the cases the Git class should be used. Args: ignore_errors: Set to True if errors while running git should be ignored. Default: False. cwd: Set the current working directory Raises: GitError: Will be raised if ignore_errors is False and git returns with an exit code != 0. Returns: stdout output of git command or empty string if ignore_errors is True and git returns with an exit code != 0. 
""" try: cmd_args = ["git"] cmd_args.extend(args) output = subprocess.run( cmd_args, cwd=fspath(cwd) if cwd else None, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf8", errors="replace", ) return output.stdout except subprocess.CalledProcessError as e: if ignore_errors: return "" raise GitError(e.returncode, e.cmd, e.output, e.stderr) from None class MergeStrategy(StrEnum): """ Possible strategies for a merge Attributes: ORT: ORT_OURS: RECURSIVE: OCTOPUS: OURS: SUBTREE: """ ORT = "ort" ORT_OURS = "ort-ours" RECURSIVE = "recursive" RESOLVE = "resolve" OCTOPUS = "octopus" OURS = "ours" SUBTREE = "subtree" class ConfigScope(StrEnum): """ Possible scopes for git settings Attributes: GLOBAL: Apply setting user wide (~/.gitconfig) LOCAL: Apply setting to the local repository only (.git/config) SYSTEM: Apply settings system wide (/etc/gitconfig) WORKTREE: Similar to LOCAL except that $GIT_DIR/config.worktree is used if extensions.worktreeConfig is enabled. If not it's the same as LOCAL. 
""" GLOBAL = "global" LOCAL = "local" SYSTEM = "system" WORKTREE = "worktree" class TagSort(StrEnum): """ Sorting for git tags Attributes: VERSION: Sort tags by version number """ VERSION = "version:refname" class ResetMode(StrEnum): SOFT = "soft" MIXED = "mixed" HARD = "hard" MERGE = "merge" KEEP = "keep" class Git: """ Run git commands as subprocesses """ def __init__(self, cwd: Optional[Path] = None) -> None: """ Create a new Git instance Args: cwd: Set the current working directory for the git commands """ self._cwd = cwd.absolute() if cwd else None @property def cwd(self) -> Optional[Path]: """ Get the current working directory as Path """ return self._cwd @cwd.setter def cwd(self, cwd: Path) -> None: """ Set the current working directory for all following git commands """ self._cwd = cwd.absolute() @property def version(self) -> str: """ Get the version string of the installed git """ # git --version returns "git version 2.3.4" return self.exec("--version").strip().rsplit(" ", 1)[1] def exec(self, *args: str) -> str: return exec_git(*args, cwd=self._cwd) def init(self, *, bare: Optional[bool] = False) -> None: """ Init a git repository Args: bare: Wether to create a `bare` repository or not. Defaults to false. """ args = ["init"] if bare: args.append("--bare") self.exec(*args) def create_branch( self, branch: str, *, start_point: Optional[str] = None ) -> None: """ Create a new branch Args: branch: Name of the branch to be created start_point: An optional git reference (branch, tag, sha, ...) from where to start the branch """ args = ["checkout", "-b", branch] if start_point: args.append(start_point) self.exec(*args) def rebase( self, base: str, *, head: Optional[str] = None, onto: Optional[str] = None, strategy: Optional[MergeStrategy] = None, ) -> None: """ Rebase a branch Args: base: Apply changes of this branch. head: Apply changes on this branch. If not set the current branch is used. onto: Apply changes on top of this branch. 
strategy: Merge strategy to use. """ args = ["rebase"] if strategy: if strategy == MergeStrategy.ORT_OURS: args.extend(["--strategy", "ort", "-X", "ours"]) else: args.extend(["--strategy", str(strategy)]) if onto: args.extend(["--onto", onto]) args.append(base) if head: args.append(head) self.exec(*args) def clone( self, repo_url: str, destination: Path, *, branch: Optional[str] = None, remote: Optional[str] = None, depth: Optional[int] = None, ) -> None: """ Clone a repository Args: repo_url: URL of the repo to clone destination: Where to checkout the clone branch: Branch to checkout. By default the default branch is used. remote: Store repo url under this remote name """ args = ["clone"] if remote: args.extend(["-o", remote]) if branch: args.extend(["-b", branch]) if depth: args.extend(["--depth", str(depth)]) args.extend([repo_url, str(destination.absolute())]) self.exec(*args) def push( self, refspec: Optional[Union[str, Iterable[str]]] = None, *, remote: Optional[str] = None, branch: Optional[str] = None, follow_tags: bool = False, force: Optional[bool] = None, delete: Optional[bool] = None, ) -> None: """ Push changes to remote repository Args: refspec: Refs to push remote: Push changes to the named remote branch: Branch to push. Will only be considered in combination with a remote. Deprecated, use refs instead. follow_tags: Push all tags pointing to a commit included in the to be pushed branch. force: Force push changes. delete: Delete remote refspec """ args = ["push"] if follow_tags: args.append("--follow-tags") if force: args.append("--force") if delete: args.append("--delete") if remote: args.append(remote) if branch: args.append(branch) if refspec: if isinstance(refspec, str): args.append(refspec) else: args.extend(refspec) self.exec(*args) def config( self, key: str, value: Optional[str] = None, *, scope: Optional[Union[ConfigScope, str]] = None, ) -> str: """ Get and set a git config Args: key: Key of the Git config setting. 
For example: core.filemode value: Value to set for a Git setting. scope: Scope of the setting. """ args = ["config"] if scope: args.append(f"--{scope}") args.append(key) if value is not None: args.append(value) return self.exec(*args) def cherry_pick(self, commits: Union[str, list[str]]) -> None: """ Apply changes of a commit(s) to the current branch Args: commit: A single git reference (e.g. sha) of the commit or a list of git references. """ if isinstance(commits, str): commits = [commits] args = ["cherry-pick"] args.extend(commits) self.exec(*args) def list_tags( self, *, sort: Optional[Union[TagSort, str]] = None, tag_name: Optional[str] = None, sort_suffix: Optional[list[str]] = None, ) -> list[str]: """ List all available tags Args: sort: Apply a specific sort algorithm for the git tags. By default git uses a lexicographic sorting. tag_name: Filter list by the tagname pattern. For example: "22.4*" sort_suffix: A list of version suffix to consider. """ if sort: args = [] if sort_suffix: for suffix in sort_suffix: args.extend(["-c", f"versionsort.suffix={suffix}"]) args.extend(["tag", "-l"]) args.append(f"--sort={sort}") else: args = ["tag", "-l"] if tag_name: args.append(tag_name) return self.exec(*args).splitlines() def add( self, files: Union[str, PathLike[str], Sequence[Union[PathLike[str], str]]], ) -> None: """ Add files to the git staging area Args: files: A single file or a list of files to add to the staging area """ if isinstance(files, (PathLike, str)): files = [files] args = ["add"] args.extend([fspath(file) for file in files]) self.exec(*args) def commit( self, message: str, *, verify: Optional[bool] = None, gpg_sign: Optional[bool] = None, gpg_signing_key: Optional[str] = None, ) -> None: """ Create a new commit Args: message: Message of the commit verify: Set to False to skip git hooks gpg_sign: Set to False to skip signing the commit via GPG gpg_signing_key: GPG Key ID to use to sign the commit """ args = ["commit"] if verify is False: 
args.append("--no-verify") if gpg_signing_key: args.append(f"-S{gpg_signing_key}") if gpg_sign is False: args.append("--no-gpg-sign") args.extend(["-m", message]) self.exec(*args) def tag( self, tag: str, *, gpg_key_id: Optional[str] = None, message: Optional[str] = None, force: Optional[bool] = False, sign: Optional[bool] = None, ) -> None: """ Create a Tag Args: tag: Tag name to create. gpg_key_id: GPG Key to sign the tag. message: Use message to annotate the given tag. force: True to replace an existing tag. sign: Set to False to deactivate signing of the tag. """ args = ["tag"] if gpg_key_id: args.extend(["-u", gpg_key_id]) if message: args.extend(["-m", message]) if force: args.append("--force") if sign is False: args.append("--no-sign") args.append(tag) self.exec(*args) def delete_tag( self, tag: str, ) -> None: """ Delete a Tag Args: tag: Tag name to delete """ args = ["tag", "-d", tag] self.exec(*args) def fetch( self, remote: Optional[str] = None, refspec: Optional[str] = None, *, verbose: bool = False, ) -> None: """ Fetch from changes from remote Args: remote: Remote to fetch changes from refspec: Specifies which refs to fetch and which local refs to update. verbose: Print verbose output. """ args = ["fetch"] if remote: args.append(remote) if refspec: args.append(refspec) if verbose: args.append("-v") self.exec(*args) def add_remote(self, remote: str, url: str) -> None: """ Add a new git remote Args: remote: Name of the new remote url: Git URL of the remote repository """ args = ["remote", "add", remote, url] self.exec(*args) def remote_url(self, remote: str = "origin") -> str: """ Get the url of a remote Args: remote: Name of the remote. Default: origin. """ args = ["remote", "get-url", remote] return self.exec(*args) def checkout( self, branch: str, *, start_point: Optional[str] = None ) -> None: """ Checkout a branch Args: branch: Branch to checkout or new branch name if starting_point is given. start_point: Create a new branch from this git ref. 
""" if start_point: args = ["checkout", "-b", branch, start_point] else: args = ["checkout", branch] self.exec(*args) def log( self, *log_args: str, oneline: Optional[bool] = None, format: Optional[str] = None, ) -> list[str]: """ Get log of a git repository Args: format: Pretty format the output. log_args: Additional arguments for git log oneline: Print the abbreviated commit id and commit message in one line per commit """ args = ["log"] if format: args.append(f"--format={format}") if oneline: args.append("--oneline") args.extend(log_args) return self.exec(*args).splitlines() def show( self, *show_args: str, format: Optional[str] = None, oneline: Optional[bool] = None, patch: Optional[bool] = None, objects: Union[str, Collection[str], None] = None, ) -> Union[str, list[str]]: """ Show various types of git objects Args: format: Pretty format the output. oneline: Print the abbreviated commit id and commit message in one line per commit. patch: True to generate patch output. False to suppress diff output. show_args: Additional arguments for git show objects: Git objects (commits, refs, ...) to get details for. Returns: A list of details about the passed object the object if more then one object is passed. Otherwise a single details is returned. """ args = ["show"] if format: args.append(f"--format={format}") if oneline: args.append("--oneline") if patch is not None: if patch: args.append("--patch") else: args.append("--no-patch") if objects: if isinstance(objects, str): objects = [objects] args.extend(objects) args.extend(show_args) output = self.exec(*args).strip() return output.splitlines() if objects and len(objects) > 1 else output def rev_list( self, *commit: str, max_parents: Optional[int] = None, abbrev_commit: Optional[bool] = False, ) -> list[str]: """ Lists commit objects in reverse chronological order Args: commit: commit objects. 
max_parents: Only list nth oldest commits abbrev_commit: Set to True to show prefix that names the commit object uniquely instead of the full commit ID. Examples: This will "list all the commits which are reachable from foo or bar, but not from baz". .. code-block:: python from pontos.git import Git git = Git() git.rev_list("foo", "bar", "^baz") This will return the first commit of foo. .. code-block:: python from pontos.git import Git git = Git() git.rev_list("foo", max_parents=0) """ args = ["rev-list"] if max_parents is not None: args.append(f"--max-parents={max_parents}") if abbrev_commit: args.append("--abbrev-commit") args.extend(commit) return self.exec(*args).splitlines() def move(self, old: PathLike, new: PathLike) -> None: """ Move a file from old to new """ self.exec("mv", fspath(old), fspath(new)) def remove(self, to_remove: PathLike) -> None: """ Remove a file from git """ self.exec("rm", fspath(to_remove)) def status( self, files: Optional[Iterable[PathLike]] = None, ) -> Iterator[StatusEntry]: """Get information about the current git status. Args: files: specify an iterable of :py:class:`os.PathLike` and exclude all other paths for the status. Returns: An iterator of :py:class:`StatusEntry` instances that contain the status of the specific files. """ args = [ "status", "-z", "--ignore-submodules", "--untracked-files=no", ] if files: args.append("--") args.extend([fspath(f) for f in files]) output = self.exec(*args) return parse_git_status(output) def reset( self, commit, *, mode: Union[ResetMode, str], ) -> None: """ Reset the git history Args: commit: Git reference to reset the checked out tree to mode: The reset mode to use Examples: This will "list all the commits which are reachable from foo or bar, but not from baz". .. 
class Status(Enum):
    """
    Status of a file in git
    """

    UNMODIFIED = " "
    MODIFIED = "M"
    ADDED = "A"
    DELETED = "D"
    RENAMED = "R"
    COPIED = "C"
    UPDATED = "U"
    UNTRACKED = "?"
    IGNORED = "!"


class StatusEntry:
    """
    Status of a file in the git index and working tree.

    Implements the :py:class:`os.PathLike` protocol.

    Attributes:
        index: Status in the index
        working_tree: Status in the working tree
        path: Path to the file
        old_path: Set for renamed files
    """

    def __init__(self, status_string: str) -> None:
        # porcelain format: two status columns, a space, then the path(s)
        status = status_string[:2]
        filename = status_string[3:]

        # Status in the index
        self.index = Status(status[0])
        # Status in the working directory
        self.working_tree = Status(status[1])

        if self.index == Status.RENAMED:
            # renames carry "new\0old" in the filename part
            new_filename, old_filename = filename.split("\0")
            self.path = Path(new_filename)
            self.old_path = Path(old_filename)
        else:
            # path of the file in git
            self.path = Path(filename)

    def __str__(self) -> str:
        return f"{self.index.value}{self.working_tree.value} {self.path}"

    def __repr__(self) -> str:
        # fix: the previous repr rendered as an empty/garbled string;
        # include class name and entry details for debugging
        return f"<StatusEntry {str(self)}>"

    def __fspath__(self):
        return self.path.__fspath__()


def parse_git_status(output: str) -> Iterator[StatusEntry]:
    """
    Parse the NUL separated output of ``git status -z`` into
    :py:class:`StatusEntry` instances.

    Args:
        output: Raw ``git status -z`` output.
    """
    output = output.rstrip("\0")
    if not output:
        return

    fields = output.split("\0")
    while fields:
        line = fields.pop(0)
        if line[0] == Status.RENAMED.value:
            # a renamed entry consumes the following field as the old path
            yield StatusEntry(f"{line}\0{fields.pop(0)}")
        else:
            yield StatusEntry(line)
body_template = Path(__file__).parent / "pr_template.md"


def from_env(name: str) -> str:
    """
    Resolve *name* from the environment, falling back to the literal
    name if the variable is not set.
    """
    return os.environ.get(name, name)


def parse_args(args: Optional[Sequence[str]] = None) -> Namespace:
    """
    Parsing args for Pontos GitHub

    Arguments:
        args: The program arguments passed by exec
    """
    parser = ArgumentParser(
        description="Greenbone GitHub API.",
    )
    shtab.add_argument_to(parser)
    parser.add_argument(
        "--quiet",
        "-q",
        action="store_true",
        help="Don't print messages to the terminal",
    )
    parser.add_argument(
        "--log-file",
        dest="log_file",
        type=str,
        help="Activate logging using the given file path",
    ).complete = shtab.FILE  # type: ignore[attr-defined]
    subparsers = parser.add_subparsers(
        title="subcommands",
        description="Valid subcommands",
        help="Additional help",
        dest="command",
    )

    # create a PR from command line
    pr_parser = subparsers.add_parser(
        "pull-request",
        aliases=["pr", "PR", "pullrequest"],
        help="Pull request related commands",
    )
    pr_parser.set_defaults(func=pull_request)
    pr_parser.add_argument(
        "-t",
        "--token",
        default="GITHUB_TOKEN",
        type=from_env,
        help=(
            "GitHub Token to access the repository. "
            "Default looks for environment variable 'GITHUB_TOKEN'"
        ),
    )
    pr_subparsers = pr_parser.add_subparsers(
        title="method",
        dest="pr_method",
        metavar="name",
        description="Valid pull request method",
        help="Pull request method",
        required=True,
    )
    create_pr_parser = pr_subparsers.add_parser(
        "create", help="Create Pull Request"
    )
    create_pr_parser.set_defaults(pr_func=create_pull_request)
    create_pr_parser.add_argument(
        "repo", help="GitHub repository (owner/name) to use"
    )
    create_pr_parser.add_argument(
        "head",
        help=("Branch to create a pull request from"),
    )
    create_pr_parser.add_argument(
        "target",
        default="main",
        # fix: help text previously read "Branch as as target"
        help="Branch as target for the pull. Default: %(default)s",
    )
    create_pr_parser.add_argument(
        "title",
        help="Title for the pull request",
    )
    create_pr_parser.add_argument(
        "-b",
        "--body",
        default=body_template.read_text(encoding="utf-8"),
        help=(
            "Description for the pull request. Can be formatted in Markdown."
        ),
    )

    update_pr_parser = pr_subparsers.add_parser(
        "update", help="Update Pull Request"
    )
    update_pr_parser.set_defaults(pr_func=update_pull_request)
    update_pr_parser.add_argument(
        "repo", help="GitHub repository (owner/name) to use"
    )
    update_pr_parser.add_argument(
        "pull_request", type=int, help="Pull Request to update"
    )
    update_pr_parser.add_argument(
        "--target",
        # fix: help text previously read "Branch as as target"
        help="Branch as target for the pull.",
    )
    update_pr_parser.add_argument(
        "--title",
        help="Title for the pull request",
    )
    update_pr_parser.add_argument(
        "-b",
        "--body",
        help=(
            "Description for the pull request. Can be formatted in Markdown."
        ),
    )

    # get files
    file_status_parser = subparsers.add_parser(
        "file-status", aliases=["status", "FS"], help="File status"
    )
    file_status_parser.set_defaults(func=file_status)
    file_status_parser.add_argument(
        "repo", help=("GitHub repository (owner/name) to use")
    )
    file_status_parser.add_argument(
        "pull_request", help="Specify the Pull Request number", type=int
    )
    file_status_parser.add_argument(
        "-s",
        "--status",
        choices=enum_choice(FileStatus),
        default=[FileStatus.ADDED, FileStatus.MODIFIED],
        nargs="+",
        help="What file status should be returned. Default: %(default)s",
    )
    file_status_parser.add_argument(
        "-o",
        "--output",
        type=FileType("w", encoding="utf-8"),
        help=(
            "Specify an output file. "
            "If none is given, output will be prompted"
            "The file will contain all files, with status "
            "changes, as given, separated by a newline"
        ),
    )
    file_status_parser.add_argument(
        "-t",
        "--token",
        default="GITHUB_TOKEN",
        type=from_env,
        help=(
            "GitHub Token to access the repository. "
            "Default looks for environment variable 'GITHUB_TOKEN'"
        ),
    )

    # labels
    label_parser = subparsers.add_parser(
        "labels", aliases=["L"], help="Issue/pull Request label handling"
    )
    label_parser.set_defaults(func=labels)
    label_parser.add_argument(
        "repo", help="GitHub repository (owner/name) to use"
    )
    label_parser.add_argument(
        "issue", help="Specify the Issue/Pull Request number", type=int
    )
    label_parser.add_argument(
        "--labels",
        "-L",
        nargs="+",
        help="Specify the labels, that should be set",
    )
    label_parser.add_argument(
        "-t",
        "--token",
        default="GITHUB_TOKEN",
        type=from_env,
        help=(
            "GitHub Token to access the repository. "
            "Default looks for environment variable 'GITHUB_TOKEN'"
        ),
    )

    repos_parser = subparsers.add_parser(
        "repos", aliases=["R"], help="Repository information"
    )
    repos_parser.set_defaults(func=repos)
    repos_parser.add_argument("orga", help="GitHub organization to use")
    repos_parser.add_argument(
        "-t",
        "--token",
        default="GITHUB_TOKEN",
        type=from_env,
        help=(
            "GitHub Token to access the repository. "
            "Default looks for environment variable 'GITHUB_TOKEN'"
        ),
    )
    repos_parser.add_argument(
        "--type",
        choices=enum_choice(RepositoryType),
        type=enum_type(RepositoryType),
        default=RepositoryType.PUBLIC,
        help=(
            "Define the type of repositories that should be covered. "
            "Default: %(default)s"
        ),
    )
    repos_parser.add_argument(
        "-p",
        "--path",
        help="Define the Path to save the Repository Information",
    )

    # create a release from command line
    re_parser = subparsers.add_parser(
        "release", aliases=["re", "RE", "release"], help="Release commands"
    )
    re_parser.set_defaults(func=release)
    re_parser.add_argument(
        "-t",
        "--token",
        default="GITHUB_TOKEN",
        type=from_env,
        help=(
            "GitHub Token to access the repository. "
            "Default looks for environment variable 'GITHUB_TOKEN'"
        ),
    )
    re_subparsers = re_parser.add_subparsers(
        title="method",
        dest="re_method",
        metavar="name",
        description="Valid release method",
        help="Release method",
        required=True,
    )
    create_re_parser = re_subparsers.add_parser("create", help="Create release")
    create_re_parser.set_defaults(re_func=create_release)
    create_re_parser.add_argument(
        "repo", help="GitHub repository (owner/name) to use"
    )
    create_re_parser.add_argument(
        "tag",
        help="Tag to use for release",
    )
    create_re_parser.add_argument(
        "name",
        help="Name of the release",
    )
    create_re_parser.add_argument(
        "-b",
        "--body",
        default=None,
        help="Description for the Release. Can be formatted in Markdown.",
    )
    create_re_parser.add_argument(
        "-tc",
        "--target-commitish",
        default=None,
        help="Git reference to use for the release",
    )
    create_re_parser.add_argument(
        "-d",
        "--draft",
        action="store_true",
        default=False,
        help="Create a draft release.",
    )
    create_re_parser.add_argument(
        "-p",
        "--prerelease",
        action="store_true",
        default=False,
        help="Create a pre-release.",
    )

    # Create a tag from command line
    tag_parser = subparsers.add_parser(
        "tag", aliases=["tag", "TAG"], help="Tag commands"
    )
    tag_parser.set_defaults(func=tag)
    tag_parser.add_argument(
        "-t",
        "--token",
        default="GITHUB_TOKEN",
        type=from_env,
        help=(
            "GitHub Token to access the repository. "
            "Default looks for environment variable 'GITHUB_TOKEN'"
        ),
    )
    tag_subparsers = tag_parser.add_subparsers(
        title="method",
        dest="tag_method",
        metavar="name",
        description="Valid tag method",
        # fix: copy-paste from the release parser said "Release method"
        help="Tag method",
        required=True,
    )
    create_tag_parser = tag_subparsers.add_parser("create", help="Create tag")
    create_tag_parser.set_defaults(tag_func=create_tag)
    create_tag_parser.add_argument(
        "repo", help="GitHub repository (owner/name) to use"
    )
    create_tag_parser.add_argument(
        "tag",
        help="Tag name to use",
    )
    create_tag_parser.add_argument(
        "name",
        help="Name of the user",
    )
    create_tag_parser.add_argument(
        "message",
        help="Tag message",
    )
    create_tag_parser.add_argument(
        "git_object",
        help="The SHA of the git object this is tagging.",
    )
    create_tag_parser.add_argument(
        "email",
        help="Email address of the user",
    )
    create_tag_parser.add_argument(
        "-got",
        "--git-object-type",
        default="commit",
        help="The type of the object we're tagging",
    )
    create_tag_parser.add_argument(
        "-d",
        "--date",
        default=None,
        help=(
            "When this object was tagged. ISO 8601 format:"
            " YYYY-MM-DDTHH:MM:SSZ."
        ),
    )

    return parser.parse_args(args)
type=split_pairs, nargs="+" ) output_parser.set_defaults(func=actions_output) input_parser = subparsers.add_parser("input", help="Print input variables") input_parser.add_argument( "input", help="Name of the input variable to print", nargs="+", ) input_parser.add_argument("--format", choices=["json"]) input_parser.set_defaults(func=actions_input) return parser.parse_args(args) pontos-25.3.2/pontos/github/actions/cmds.py000066400000000000000000000015251476255566300207170ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import json from argparse import Namespace from typing import List, Tuple from pontos.terminal import Terminal from .core import ActionIO def actions_output(_terminal: Terminal, args: Namespace) -> None: """ Set output variables """ output: List[Tuple[str, str]] = args.output for pair in output: name, value = pair ActionIO.output(name, value) def actions_input(terminal: Terminal, args: Namespace) -> None: names: List[str] = args.input inputs = {} for name in names: inputs[name] = ActionIO.input(name) if args.format == "json": terminal.out(json.dumps(inputs)) else: for name, value in inputs.items(): terminal.out(f"{name}={value if value else ''}") pontos-25.3.2/pontos/github/actions/core.py000066400000000000000000000206701476255566300207230ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import os import uuid from contextlib import contextmanager from io import TextIOWrapper from pathlib import Path from typing import Generator, Optional from pontos.typing import SupportsStr from .errors import GitHubActionsError def _to_options( name: Optional[str] = None, line: Optional[str] = None, end_line: Optional[str] = None, column: Optional[str] = None, end_column: Optional[str] = None, title: Optional[str] = None, ): options = [] if name: options.append(f"file={name}") if line: options.append(f"line={line}") 
if end_line: options.append(f"endLine={end_line}") if column: options.append(f"col={column}") if end_column: options.append(f"endColumn={end_column}") if title: options.append(f"title={title}") return ",".join(options) def _message( message_type: str, message: str, *, name: Optional[str] = None, line: Optional[str] = None, end_line: Optional[str] = None, column: Optional[str] = None, end_column: Optional[str] = None, title: Optional[str] = None, ): options = _to_options(name, line, end_line, column, end_column, title) print(f"::{message_type} {options}::{message}") class Console: """ Class for printing messages to the action console """ @classmethod @contextmanager def group(cls, title: str): """ ContextManager to display a foldable group .. code-block:: python from pontos.github.actions import Console console = Console() with console.group("my-group"): console.log("some message") Args: title: Title of the group """ cls.start_group(title) yield cls.end_group() @staticmethod def start_group(title: str): """ Start a new foldable group Args: title: Title of the group """ print(f"::group::{title}") @staticmethod def end_group(): """ End the last group """ print("::endgroup::") @staticmethod def warning( message: str, *, name: Optional[str] = None, line: Optional[str] = None, end_line: Optional[str] = None, column: Optional[str] = None, end_column: Optional[str] = None, title: Optional[str] = None, ): """ Print a warning message This message will also be shown at the action summary """ _message( "warning", message, name=name, line=line, end_line=end_line, column=column, end_column=end_column, title=title, ) @staticmethod def error( message: str, *, name: Optional[str] = None, line: Optional[str] = None, end_line: Optional[str] = None, column: Optional[str] = None, end_column: Optional[str] = None, title: Optional[str] = None, ): """ Print an error message This message will also be shown at the action summary """ _message( "error", message, name=name, line=line, 
class ActionOutput:
    """
    A GitHub Action output
    """

    def __init__(self, file: TextIOWrapper) -> None:
        self._file = file

    def write(self, name: str, value: SupportsStr):
        """
        Set action output

        An action output can be consumed by another job

        Args:
            name: Name of the output variable
            value: Value of the output variable
        """
        # GITHUB_OUTPUT expects simple "name=value" lines; print adds
        # the trailing newline for us
        print(f"{name}={value}", file=self._file)
code-block:: python from pontos.github.actions import ActionIO with ActionIO.out() as out: out.write("foo", "bar") out.write("lorem", "ipsum") """ output_filename = os.environ.get("GITHUB_OUTPUT") if not output_filename: raise GitHubActionsError( "GITHUB_OUTPUT environment variable not set. Can't write " "action output." ) with Path(output_filename).open("a", encoding="utf8") as f: yield ActionOutput(f) @staticmethod def output(name: str, value: SupportsStr): """ Set action output An action output can be consumed by another job Example: .. code-block:: python from pontos.github.actions import ActionIO ActionIO.output("foo", "bar") Args: name: Name of the output variable value: Value of the output variable """ output_filename = os.environ.get("GITHUB_OUTPUT") if not output_filename: raise GitHubActionsError( "GITHUB_OUTPUT environment variable not set. Can't write " "action output." ) with Path(output_filename).open("a", encoding="utf8") as f: f.write(f"{name}={value}\n") @staticmethod def multiline_output(name: str, value: SupportsStr): """ Set an multiline action output An action output can be consumed by another job Example: .. code-block:: python from pontos.github.actions import ActionIO ActionIO.output("foo", "bar") Args: name: Name of the output variable value: Value of the output variable """ output_filename = os.environ.get("GITHUB_OUTPUT") if not output_filename: raise GitHubActionsError( "GITHUB_OUTPUT environment variable not set. Can't write " "action output." ) with Path(output_filename).open("a", encoding="utf8") as f: delimiter = uuid.uuid1() f.write(f"{name}<<{delimiter}") f.write(f"{value}") f.write(str(delimiter)) @staticmethod def input(name: str, default: Optional[str] = None) -> Optional[str]: """ Get the value of an action input Example: .. 
class GitHubEnvironment:
    """
    Class to handle values from the GitHub Environment

    https://docs.github.com/en/actions/learn-github-actions/environment-variables
    """

    @property
    def workspace(self) -> Optional[Path]:
        """Path to the checked out workspace, if set"""
        value = os.environ.get("GITHUB_WORKSPACE")
        return Path(value) if value else None

    @property
    def repository(self) -> Optional[str]:
        """Repository in ``owner/name`` form"""
        return os.environ.get("GITHUB_REPOSITORY")

    @property
    def sha(self) -> Optional[str]:
        """Commit SHA that triggered the workflow"""
        return os.environ.get("GITHUB_SHA")

    @property
    def ref(self) -> Optional[str]:
        """Fully-formed git ref of the triggering event"""
        return os.environ.get("GITHUB_REF")

    @property
    def ref_name(self) -> Optional[str]:
        """Short ref name of the branch or tag"""
        return os.environ.get("GITHUB_REF_NAME")

    @property
    def event_path(self) -> Optional[Path]:
        """Path to the file with the full webhook event payload"""
        value = os.environ.get("GITHUB_EVENT_PATH")
        return Path(value) if value else None

    @property
    def head_ref(self) -> Optional[str]:
        """Head ref of a pull request run"""
        return os.environ.get("GITHUB_HEAD_REF")

    @property
    def base_ref(self) -> Optional[str]:
        """Base ref of a pull request run"""
        return os.environ.get("GITHUB_BASE_REF")

    @property
    def api_url(self) -> Optional[str]:
        """URL of the GitHub API"""
        return os.environ.get("GITHUB_API_URL")

    @property
    def actor(self) -> Optional[str]:
        """User name that initiated the workflow run"""
        return os.environ.get("GITHUB_ACTOR")

    @property
    def run_id(self) -> Optional[str]:
        """Unique ID of the current workflow run"""
        return os.environ.get("GITHUB_RUN_ID")

    @property
    def action_id(self) -> Optional[str]:
        """ID of the currently running action step"""
        return os.environ.get("GITHUB_ACTION")

    @property
    def is_debug(self) -> bool:
        """True if step debug logging is enabled on the runner"""
        return os.environ.get("RUNNER_DEBUG") == "1"
class PullRequestState(Enum):
    """
    State of a pull request

    Attributes:
        OPEN: The pull request is open
        CLOSED: The pull request is closed
    """

    OPEN = "open"
    CLOSED = "closed"


@dataclass
class Label:
    """
    A label of a pull request or issue
    """

    name: str


@dataclass
class Ref:
    """
    A git branch reference

    Attributes:
        name: Name of the git branch reference for example main
        sha: Git commit ID of the reference
    """

    name: str
    sha: str


@dataclass
class GitHubPullRequestEvent:
    """
    Event data of a GitHub Pull Request

    https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#pull_request

    Attributes:
        draft: True if the pull request is a draft
        number: ID of the pull request
        labels: Labels attached to the pull request
        title: Title of the pull request
        merged: True if the pull request is already merged
        state: State of the pull request (open, closed)
        base: Base reference of the pull request (target branch)
        head: Head reference of the pull request (source branch)
    """

    draft: Optional[bool]
    number: Optional[int]
    labels: Optional[Iterable[str]]
    title: Optional[str]
    merged: Optional[bool]
    state: PullRequestState
    base: Ref
    head: Ref

    def __init__(self, pull_request_data: Dict[str, Any]):
        """
        Derive the pull request information from the pull request data of
        a GitHub event.

        Args:
            pull_request_data: JSON based pull request information as dict
        """
        data = pull_request_data or {}

        def _ref(key: str) -> Ref:
            # base/head are nested objects carrying ref name and sha
            info = data.get(key) or {}
            return Ref(info.get("ref"), info.get("sha"))  # type: ignore

        self.draft = data.get("draft")
        self.number = data.get("number")
        self.labels = [Label(entry.get("name")) for entry in data.get("labels")]  # type: ignore # noqa: E501
        self.title = data.get("title")
        self.merged = data.get("merged")
        self.state = PullRequestState(data.get("state"))
        self.base = _ref("base")
        self.head = _ref("head")


@dataclass
class GitHubEvent:
    """
    GitHub Actions provides event data for the running action as JSON data
    in a local file at the runner.

    The JSON data for the events is specified at
    https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads

    Attributes:
        pull_request: Information about the pull request
    """

    pull_request: GitHubPullRequestEvent

    def __init__(self, event_path: Path):
        """
        Loads the event data from the passed path

        Args:
            event_path: Path to the event data
        """
        raw = event_path.read_text(encoding="utf-8")
        self._event_data = json.loads(raw) if raw else {}

        pr_data = self._event_data.get("pull_request")
        self.pull_request = (
            GitHubPullRequestEvent(pr_data) if pr_data else None  # type: ignore # noqa: E501
        )

    def __str__(self) -> str:
        return json.dumps(self._event_data, indent=2)
"__main__": main() pontos-25.3.2/pontos/github/api/000077500000000000000000000000001476255566300165255ustar00rootroot00000000000000pontos-25.3.2/pontos/github/api/__init__.py000066400000000000000000000030641476255566300206410ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .api import GitHubAsyncRESTApi from .artifacts import GitHubAsyncRESTArtifacts from .branch import GitHubAsyncRESTBranches, update_from_applied_settings from .contents import GitHubAsyncRESTContent from .dependabot import GitHubAsyncRESTDependabot from .errors import GitHubApiError from .helper import ( DEFAULT_GITHUB_API_URL, DEFAULT_TIMEOUT_CONFIG, JSON, JSON_OBJECT, ) from .labels import GitHubAsyncRESTLabels from .organizations import GitHubAsyncRESTOrganizations from .packages import GitHubAsyncRESTPackages from .pull_requests import GitHubAsyncRESTPullRequests from .release import GitHubAsyncRESTReleases from .repositories import GitHubAsyncRESTRepositories from .search import GitHubAsyncRESTSearch from .tags import GitHubAsyncRESTTags from .teams import GitHubAsyncRESTTeams from .workflows import GitHubAsyncRESTWorkflows __all__ = [ "DEFAULT_TIMEOUT_CONFIG", "DEFAULT_GITHUB_API_URL", "JSON", "JSON_OBJECT", "update_from_applied_settings", "GitHubApiError", "GitHubAsyncRESTApi", "GitHubAsyncRESTArtifacts", "GitHubAsyncRESTBranches", "GitHubAsyncRESTContent", "GitHubAsyncRESTDependabot", "GitHubAsyncRESTLabels", "GitHubAsyncRESTOrganizations", "GitHubAsyncRESTPackages", "GitHubAsyncRESTPullRequests", "GitHubAsyncRESTReleases", "GitHubAsyncRESTRepositories", "GitHubAsyncRESTSearch", "GitHubAsyncRESTTags", "GitHubAsyncRESTTeams", "GitHubAsyncRESTWorkflows", ] pontos-25.3.2/pontos/github/api/api.py000066400000000000000000000143311476255566300176520ustar00rootroot00000000000000# Copyright (C) 2022 - 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from contextlib import AbstractAsyncContextManager 
from types import TracebackType from typing import Optional, Type import httpx from pontos.github.api.artifacts import GitHubAsyncRESTArtifacts from pontos.github.api.billing import GitHubAsyncRESTBilling from pontos.github.api.branch import GitHubAsyncRESTBranches from pontos.github.api.client import GitHubAsyncRESTClient from pontos.github.api.code_scanning import GitHubAsyncRESTCodeScanning from pontos.github.api.contents import GitHubAsyncRESTContent from pontos.github.api.dependabot import GitHubAsyncRESTDependabot from pontos.github.api.helper import ( DEFAULT_GITHUB_API_URL, DEFAULT_TIMEOUT_CONFIG, ) from pontos.github.api.labels import GitHubAsyncRESTLabels from pontos.github.api.organizations import GitHubAsyncRESTOrganizations from pontos.github.api.packages import GitHubAsyncRESTPackages from pontos.github.api.pull_requests import GitHubAsyncRESTPullRequests from pontos.github.api.release import GitHubAsyncRESTReleases from pontos.github.api.repositories import GitHubAsyncRESTRepositories from pontos.github.api.search import GitHubAsyncRESTSearch from pontos.github.api.secret_scanning import GitHubAsyncRESTSecretScanning from pontos.github.api.tags import GitHubAsyncRESTTags from pontos.github.api.teams import GitHubAsyncRESTTeams from pontos.github.api.users import GitHubAsyncRESTUsers from pontos.github.api.workflows import GitHubAsyncRESTWorkflows from pontos.helper import deprecated class GitHubAsyncRESTApi(AbstractAsyncContextManager): """ A asynchronous GitHub REST API. Should be used as an async context manager. Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: repositories = await api.organizations.get_repositories("foo") """ def __init__( self, token: Optional[str] = None, url: Optional[str] = DEFAULT_GITHUB_API_URL, *, timeout: Optional[httpx.Timeout] = DEFAULT_TIMEOUT_CONFIG, ) -> None: """ Args: token: GitHub API token url: GitHub URL timeout: Timeout settings to use """ self._client = GitHubAsyncRESTClient(token, url, timeout=timeout) @property def organizations(self) -> GitHubAsyncRESTOrganizations: """ Organizations related API """ return GitHubAsyncRESTOrganizations(self._client) @property def artifacts(self) -> GitHubAsyncRESTArtifacts: """ Artifacts related API """ return GitHubAsyncRESTArtifacts(self._client) @property def billing(self) -> GitHubAsyncRESTBilling: """ Billing related API """ return GitHubAsyncRESTBilling(self._client) @property def branches(self) -> GitHubAsyncRESTBranches: """ Branches related API """ return GitHubAsyncRESTBranches(self._client) @property def code_scanning(self) -> GitHubAsyncRESTCodeScanning: """ Code scanning related API """ return GitHubAsyncRESTCodeScanning(self._client) @property def contents(self) -> GitHubAsyncRESTContent: """ Contents related API """ return GitHubAsyncRESTContent(self._client) @property def dependabot(self) -> GitHubAsyncRESTDependabot: """ Dependabot related API """ return GitHubAsyncRESTDependabot(self._client) @property def labels(self) -> GitHubAsyncRESTLabels: """ Labels related API """ return GitHubAsyncRESTLabels(self._client) @property def packages(self) -> GitHubAsyncRESTPackages: """ Packages related API """ return GitHubAsyncRESTPackages(self._client) @property @deprecated( since="23.3.4", reason="The pulls property is obsolete. Please use pull_requests " "instead.", ) def pulls(self) -> GitHubAsyncRESTPullRequests: """ Pull Requests related API .. deprecated:: 23.3.4 Use :py:attr:`pull_requests` instead. 
""" return GitHubAsyncRESTPullRequests(self._client) @property def pull_requests(self) -> GitHubAsyncRESTPullRequests: """ Pull Requests related API """ return GitHubAsyncRESTPullRequests(self._client) @property def releases(self) -> GitHubAsyncRESTReleases: """ Releases related API """ return GitHubAsyncRESTReleases(self._client) @property def workflows(self) -> GitHubAsyncRESTWorkflows: """ Workflows related API """ return GitHubAsyncRESTWorkflows(self._client) @property def repositories(self) -> GitHubAsyncRESTRepositories: """ Repositories related API """ return GitHubAsyncRESTRepositories(self._client) @property def secret_scanning(self) -> GitHubAsyncRESTSecretScanning: """ Secret scanning related API """ return GitHubAsyncRESTSecretScanning(self._client) @property def search(self) -> GitHubAsyncRESTSearch: """ Search related API """ return GitHubAsyncRESTSearch(self._client) @property def teams(self) -> GitHubAsyncRESTTeams: """ Teams related API """ return GitHubAsyncRESTTeams(self._client) @property def tags(self) -> GitHubAsyncRESTTags: """ Tags related API """ return GitHubAsyncRESTTags(self._client) @property def users(self) -> GitHubAsyncRESTUsers: """ Users related API """ return GitHubAsyncRESTUsers(self._client) async def __aenter__(self) -> "GitHubAsyncRESTApi": await self._client.__aenter__() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: return await self._client.__aexit__(exc_type, exc_value, traceback) pontos-25.3.2/pontos/github/api/artifacts.py000066400000000000000000000127271476255566300210700ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import AsyncContextManager, AsyncIterator, Optional, Union from pontos.github.api.client import GitHubAsyncREST, Params from pontos.github.models.artifact import Artifact from pontos.helper 
class GitHubAsyncRESTArtifacts(GitHubAsyncREST):
    """Access to the GitHub Actions artifacts REST endpoints."""

    def _get_paged_artifacts(
        self, api, *, params: Optional[Params] = None
    ) -> AsyncIterator[Artifact]:
        # Shared helper: iterate the "artifacts" key of paginated responses.
        return self._get_paged_items(
            api, "artifacts", Artifact, params=params  # type: ignore
        )

    def get_all(self, repo: str) -> AsyncIterator[Artifact]:
        """
        List all artifacts of a repository

        https://docs.github.com/en/rest/actions/artifacts#list-artifacts-for-a-repository

        Args:
            repo: GitHub repository (owner/name) to use

        Raises:
            HTTPStatusError: A httpx.HTTPStatusError is raised if the request
                failed.

        Returns:
            An async iterator for the received artifacts
        """
        return self._get_paged_artifacts(f"/repos/{repo}/actions/artifacts")

    async def get(self, repo: str, artifact: Union[str, int]) -> Artifact:
        """
        Get a single artifact of a repository

        https://docs.github.com/en/rest/actions/artifacts#get-an-artifact

        Args:
            repo: GitHub repository (owner/name) to use
            artifact: ID of the artifact

        Raises:
            HTTPStatusError: A httpx.HTTPStatusError is raised if the request
                failed.

        Returns:
            Information about the artifact
        """
        response = await self._client.get(
            f"/repos/{repo}/actions/artifacts/{artifact}"
        )
        response.raise_for_status()
        return Artifact.from_dict(response.json())

    def get_workflow_run_artifacts(
        self, repo: str, run: Union[str, int]
    ) -> AsyncIterator[Artifact]:
        """
        List all artifacts for a workflow run

        https://docs.github.com/en/rest/actions/artifacts#list-workflow-run-artifacts

        Args:
            repo: GitHub repository (owner/name) to use
            run: The unique identifier of the workflow run

        Raises:
            HTTPStatusError: A httpx.HTTPStatusError is raised if the request
                failed.

        Returns:
            An async iterator for the received artifacts
        """
        return self._get_paged_artifacts(
            f"/repos/{repo}/actions/runs/{run}/artifacts"
        )

    async def delete(self, repo: str, artifact: Union[str, int]) -> None:
        """
        Delete an artifact of a repository

        https://docs.github.com/en/rest/actions/artifacts#delete-an-artifact

        Args:
            repo: GitHub repository (owner/name) to use
            artifact: ID of the artifact

        Raises:
            HTTPStatusError: A httpx.HTTPStatusError is raised if the request
                failed.
        """
        response = await self._client.delete(
            f"/repos/{repo}/actions/artifacts/{artifact}"
        )
        response.raise_for_status()

    def download(
        self, repo: str, artifact: Union[str, int]
    ) -> AsyncContextManager[AsyncDownloadProgressIterable[bytes]]:
        """
        Download a repository artifact zip file

        https://docs.github.com/en/rest/actions/artifacts#download-an-artifact

        Args:
            repo: GitHub repository (owner/name) to use
            artifact: ID of the artifact

        Raises:
            HTTPStatusError: If the request was invalid

        Returns:
            An async context manager yielding a progress-aware download
            iterator over the zip content chunks
        """
        # The endpoint streams the zip; wrap the stream in the download helper.
        return download_async(
            self._client.stream(
                f"/repos/{repo}/actions/artifacts/{artifact}/zip"
            )
        )
class GitHubAsyncRESTBilling(GitHubAsyncREST):
    """Access to the GitHub billing REST endpoints."""

    async def actions(self, organization: str) -> ActionsBilling:
        """
        Get the summary of the free and paid GitHub Actions minutes used

        https://docs.github.com/en/rest/billing/billing#get-github-actions-billing-for-an-organization

        Args:
            organization: The organization name

        Raises:
            HTTPStatusError: A httpx.HTTPStatusError is raised if the request
                failed.

        Returns:
            Information about the Actions billing
        """
        response = await self._client.get(
            f"/orgs/{organization}/settings/billing/actions"
        )
        response.raise_for_status()
        return ActionsBilling.from_dict(response.json())

    async def packages(self, organization: str) -> PackagesBilling:
        """
        Get the free and paid storage used for GitHub Packages in gigabytes

        https://docs.github.com/en/rest/billing/billing#get-github-packages-billing-for-an-organization

        Args:
            organization: The organization name

        Raises:
            HTTPStatusError: A httpx.HTTPStatusError is raised if the request
                failed.

        Returns:
            Information about the Packages billing
        """
        response = await self._client.get(
            f"/orgs/{organization}/settings/billing/packages"
        )
        response.raise_for_status()
        return PackagesBilling.from_dict(response.json())

    async def storage(self, organization: str) -> StorageBilling:
        """
        Get the estimated paid and estimated total storage used for GitHub
        Actions and GitHub Packages

        https://docs.github.com/en/rest/billing/billing#get-shared-storage-billing-for-an-organization

        Args:
            organization: The organization name

        Raises:
            HTTPStatusError: A httpx.HTTPStatusError is raised if the request
                failed.

        Returns:
            Information about the storage billing
        """
        response = await self._client.get(
            f"/orgs/{organization}/settings/billing/shared-storage"
        )
        response.raise_for_status()
        return StorageBilling.from_dict(response.json())
GITHUB_ACTIONS_APP_ID = 15368


def _merge_enabled_flag(override: Optional[bool], applied: Any) -> Optional[bool]:
    """
    Merge a simple on/off branch protection rule.

    Returns ``override`` if the caller provided one, otherwise the ``enabled``
    state of the currently applied rule object (or None if no rule is set).
    """
    if override is not None:
        return override
    return applied.enabled if applied else None


def update_from_applied_settings(
    branch_protection: "BranchProtection",
    required_status_checks: Optional[Iterable[Tuple[str, str]]] = None,
    require_branches_to_be_up_to_date: Optional[bool] = None,
    enforce_admins: Optional[bool] = None,
    dismissal_restrictions_users: Optional[Iterable[str]] = None,
    dismissal_restrictions_teams: Optional[Iterable[str]] = None,
    dismissal_restrictions_apps: Optional[Iterable[str]] = None,
    dismiss_stale_reviews: Optional[bool] = None,
    require_code_owner_reviews: Optional[bool] = None,
    required_approving_review_count: Optional[int] = None,
    require_last_push_approval: Optional[bool] = None,
    bypass_pull_request_allowances_users: Optional[Iterable[str]] = None,
    bypass_pull_request_allowances_teams: Optional[Iterable[str]] = None,
    bypass_pull_request_allowances_apps: Optional[Iterable[str]] = None,
    restrictions_users: Optional[Iterable[str]] = None,
    restrictions_teams: Optional[Iterable[str]] = None,
    restrictions_apps: Optional[Iterable[str]] = None,
    required_linear_history: Optional[bool] = None,
    allow_force_pushes: Optional[bool] = None,
    allow_deletions: Optional[bool] = None,
    block_creations: Optional[bool] = None,
    required_conversation_resolution: Optional[bool] = None,
    lock_branch: Optional[bool] = None,
    allow_fork_syncing: Optional[bool] = None,
    required_signatures: Optional[bool] = None,
) -> Dict[str, Any]:
    """
    Update branch protection rules from applied settings.

    Merges the currently applied branch protection settings with the desired
    updated values and returns keyword arguments suitable for
    ``update_protection_rules``. Explicitly passed arguments always win over
    the applied settings; for settings the caller leaves at None, the applied
    value is carried over (or None if the rule is not applied at all).

    Args:
        branch_protection: The currently applied branch protection rules
        required_status_checks: Tuples of (check name, app id) to require
        require_branches_to_be_up_to_date: Require branches to be up to date
            before merging
        enforce_admins: Enforce all restrictions for administrators too
        dismissal_restrictions_users/teams/apps: Who may dismiss PR reviews
        dismiss_stale_reviews: Dismiss approvals on new commits
        require_code_owner_reviews: Require code owner review before merge
        required_approving_review_count: Number of required approvals
        require_last_push_approval: Require approval by someone other than
            the last pusher
        bypass_pull_request_allowances_users/teams/apps: Who may bypass PR
            requirements
        restrictions_users/teams/apps: Who may push to the protected branch
        required_linear_history: Enforce a linear commit history
        allow_force_pushes: Permit force pushes
        allow_deletions: Permit deleting the protected branch
        block_creations: Block creation of matching branches
        required_conversation_resolution: Require resolved conversations
        lock_branch: Make the branch read-only
        allow_fork_syncing: Allow pulling from upstream while locked
        required_signatures: Require signed commits

    Returns:
        Keyword arguments for ``update_protection_rules``
    """
    kwargs: Dict[str, Any] = {}

    # Simple on/off rules: explicit argument wins, otherwise keep the
    # applied rule's enabled state.
    kwargs["enforce_admins"] = _merge_enabled_flag(
        enforce_admins, branch_protection.enforce_admins
    )
    kwargs["required_linear_history"] = _merge_enabled_flag(
        required_linear_history, branch_protection.required_linear_history
    )
    kwargs["allow_force_pushes"] = _merge_enabled_flag(
        allow_force_pushes, branch_protection.allow_force_pushes
    )
    kwargs["allow_deletions"] = _merge_enabled_flag(
        allow_deletions, branch_protection.allow_deletions
    )
    kwargs["required_conversation_resolution"] = _merge_enabled_flag(
        required_conversation_resolution,
        branch_protection.required_conversation_resolution,
    )
    kwargs["block_creations"] = _merge_enabled_flag(
        block_creations, branch_protection.block_creations
    )
    kwargs["lock_branch"] = _merge_enabled_flag(
        lock_branch, branch_protection.lock_branch
    )
    kwargs["allow_fork_syncing"] = _merge_enabled_flag(
        allow_fork_syncing, branch_protection.allow_fork_syncing
    )
    kwargs["required_signatures"] = _merge_enabled_flag(
        required_signatures, branch_protection.required_signatures
    )

    existing_required_status_checks = branch_protection.required_status_checks
    if existing_required_status_checks:
        kwargs["require_branches_to_be_up_to_date"] = (
            existing_required_status_checks.strict
        )
        if existing_required_status_checks.checks is not None:
            kwargs["required_status_checks"] = [
                (check.context, check.app_id)
                for check in existing_required_status_checks.checks
            ]

    if required_status_checks is not None:
        kwargs["required_status_checks"] = required_status_checks
    # Fix: use "is not None" instead of a truthiness check so that an
    # explicit False can override an applied strict=True setting, matching
    # the override semantics of every other parameter.
    if require_branches_to_be_up_to_date is not None:
        kwargs["require_branches_to_be_up_to_date"] = (
            require_branches_to_be_up_to_date
        )

    required_pull_request_reviews = (
        branch_protection.required_pull_request_reviews
    )
    if required_pull_request_reviews:
        dismissal_restrictions = (
            required_pull_request_reviews.dismissal_restrictions
        )
        if dismissal_restrictions:
            kwargs["dismissal_restrictions_users"] = (
                [user.login for user in dismissal_restrictions.users]
                if dismissal_restrictions.users
                else []
            )
            kwargs["dismissal_restrictions_teams"] = (
                [team.slug for team in dismissal_restrictions.teams]
                if dismissal_restrictions.teams
                else []
            )
            kwargs["dismissal_restrictions_apps"] = (
                [app.slug for app in dismissal_restrictions.apps]
                if dismissal_restrictions.apps
                else []
            )

        kwargs["dismiss_stale_reviews"] = (
            required_pull_request_reviews.dismiss_stale_reviews
        )
        kwargs["require_code_owner_reviews"] = (
            required_pull_request_reviews.require_code_owner_reviews
        )
        kwargs["required_approving_review_count"] = (
            required_pull_request_reviews.required_approving_review_count
        )
        kwargs["require_last_push_approval"] = (
            required_pull_request_reviews.require_last_push_approval
        )

        bypass_pull_request_allowances = (
            required_pull_request_reviews.bypass_pull_request_allowances
        )
        if bypass_pull_request_allowances:
            kwargs["bypass_pull_request_allowances_users"] = (
                [user.login for user in bypass_pull_request_allowances.users]
                if bypass_pull_request_allowances.users is not None
                else []
            )
            kwargs["bypass_pull_request_allowances_teams"] = (
                [team.slug for team in bypass_pull_request_allowances.teams]
                if bypass_pull_request_allowances.teams is not None
                else []
            )
            kwargs["bypass_pull_request_allowances_apps"] = (
                [app.slug for app in bypass_pull_request_allowances.apps]
                if bypass_pull_request_allowances.apps is not None
                else []
            )

    existing_restrictions = branch_protection.restrictions
    if existing_restrictions:
        kwargs["restrictions_users"] = (
            [user.login for user in existing_restrictions.users]
            if existing_restrictions.users is not None
            else []
        )
        kwargs["restrictions_teams"] = (
            [team.slug for team in existing_restrictions.teams]
            if existing_restrictions.teams is not None
            else []
        )
        kwargs["restrictions_apps"] = (
            [app.slug for app in existing_restrictions.apps]
            if existing_restrictions.apps is not None
            else []
        )

    # Explicit overrides for the list-valued settings.
    if dismissal_restrictions_users is not None:
        kwargs["dismissal_restrictions_users"] = list(
            dismissal_restrictions_users
        )
    if dismissal_restrictions_teams is not None:
        kwargs["dismissal_restrictions_teams"] = list(
            dismissal_restrictions_teams
        )
    if dismissal_restrictions_apps is not None:
        kwargs["dismissal_restrictions_apps"] = list(
            dismissal_restrictions_apps
        )
    if bypass_pull_request_allowances_users is not None:
        kwargs["bypass_pull_request_allowances_users"] = list(
            bypass_pull_request_allowances_users
        )
    if bypass_pull_request_allowances_teams is not None:
        kwargs["bypass_pull_request_allowances_teams"] = list(
            bypass_pull_request_allowances_teams
        )
    if bypass_pull_request_allowances_apps is not None:
        kwargs["bypass_pull_request_allowances_apps"] = list(
            bypass_pull_request_allowances_apps
        )
    if dismiss_stale_reviews is not None:
        kwargs["dismiss_stale_reviews"] = dismiss_stale_reviews
    if require_code_owner_reviews is not None:
        kwargs["require_code_owner_reviews"] = require_code_owner_reviews
    if required_approving_review_count is not None:
        kwargs["required_approving_review_count"] = (
            required_approving_review_count
        )
    if require_last_push_approval is not None:
        kwargs["require_last_push_approval"] = require_last_push_approval
    if restrictions_users is not None:
        kwargs["restrictions_users"] = restrictions_users
    if restrictions_teams is not None:
        kwargs["restrictions_teams"] = restrictions_teams
    if restrictions_apps is not None:
        kwargs["restrictions_apps"] = restrictions_apps

    return kwargs
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: exists = await api.branches.exists("foo/bar", "baz") """ api = f"/repos/{repo}/branches/{branch}" response = await self._client.get(api) return response.is_success async def delete(self, repo: str, branch: str) -> None: """ Delete a branch on GitHub Args: repo: GitHub repository (owner/name) to use branch: Branch to be deleted Raises: HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.branches.delete("foo/bar", "baz") """ api = f"/repos/{repo}/git/refs/{branch}" response = await self._client.delete(api) response.raise_for_status() async def protection_rules( self, repo: str, branch: str ) -> BranchProtection: """ Get branch protection rules for a specific repository branch https://docs.github.com/en/rest/branches/branch-protection#get-branch-protection Args: repo: GitHub repository (owner/name) to use branch: Get protection rules for this branch Raises: HTTPStatusError: If the request was invalid Returns: The currently applied branch protection rules Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: rules = await api.branches.protection_rules( "foo/bar", "baz" ) """ api = f"/repos/{repo}/branches/{branch}/protection" response = await self._client.get(api) response.raise_for_status() return BranchProtection.from_dict(response.json()) async def update_protection_rules( self, repo: str, branch: str, *, required_status_checks: Optional[Iterable[Tuple[str, str]]] = None, require_branches_to_be_up_to_date: Optional[bool] = None, enforce_admins: Optional[bool] = None, dismissal_restrictions_users: Optional[Iterable[str]] = None, dismissal_restrictions_teams: Optional[Iterable[str]] = None, dismissal_restrictions_apps: Optional[Iterable[str]] = None, dismiss_stale_reviews: Optional[bool] = None, require_code_owner_reviews: Optional[bool] = None, required_approving_review_count: Optional[int] = None, require_last_push_approval: Optional[bool] = None, bypass_pull_request_allowances_users: Optional[Iterable[str]] = None, bypass_pull_request_allowances_teams: Optional[Iterable[str]] = None, bypass_pull_request_allowances_apps: Optional[Iterable[str]] = None, restrictions_users: Optional[Iterable[str]] = None, restrictions_teams: Optional[Iterable[str]] = None, restrictions_apps: Optional[Iterable[str]] = None, required_linear_history: Optional[bool] = None, allow_force_pushes: Optional[bool] = None, allow_deletions: Optional[bool] = None, block_creations: Optional[bool] = None, required_conversation_resolution: Optional[bool] = None, lock_branch: Optional[bool] = None, allow_fork_syncing: Optional[bool] = None, required_signatures: Optional[bool] = None, ) -> BranchProtection: """ Update or create branch protection rules for a specific repository branch. 
https://docs.github.com/en/rest/branches/branch-protection#update-branch-protection Args: repo: GitHub repository (owner/name) to use branch: Get protection rules for this branch required_status_checks: An iterable of status checks to require in order to merge into this branch. Contains tuples of the name of the required check and the ID of the GitHub App that must provide this check. Set this App ID to None to automatically select the GitHub App that has recently provided this check require_branches_to_be_up_to_date: Require branches to be up to date before merging. enforce_admins: Enforce all configured restrictions for administrators. dismissal_restrictions_users: Specify which users can dismiss pull request reviews. dismissal_restrictions_teams: Specify which teams can dismiss pull request reviews dismissal_restrictions_apps: Specify which apps can dismiss pull request reviews dismiss_stale_reviews: Set to True if you want to automatically dismiss approving reviews when someone pushes a new commit. require_code_owner_reviews: Blocks merging pull requests until code owners review them. required_approving_review_count: Specify the number of reviewers required to approve pull requests. Use a number between 1 and 6 or 0 to not require reviewers. require_last_push_approval: Whether someone other than the person who last pushed to the branch must approve this pull request. bypass_pull_request_allowances_users: The list of user logins allowed to bypass pull request requirements. bypass_pull_request_allowances_teams: The list of team slugs allowed to bypass pull request requirements. bypass_pull_request_allowances_apps: The list of app slugs allowed to bypass pull request requirements. restrictions_users: Restrict users who can push to the protected branch. restrictions_teams: Restrict teams which can push to the protected branch. restrictions_apps: Restrict apps which can push to the protected branch. required_linear_history: Enforce a linear commit Git history. 
allow_force_pushes: Permit force pushes to the protected branch by anyone with write access to the repository allow_deletions: Allow deletion of the protected branch by anyone with write access to the repository. block_creations: If set to True, the restrictions branch protection settings which limits who can push will also block pushes which create new branches, unless the push is initiated by a user, team, or app which has the ability to push. required_conversation_resolution: Require all conversations on code to be resolved before a pull request can be merged into a branch that matches this rule. lock_branch: Whether to set the branch as read-only. If this is True, users will not be able to push to the branch. allow_fork_syncing: Whether users can pull changes from upstream when the branch is locked. Set to True to allow fork syncing. Set to False to prevent fork syncing. required_signature: True to require signed commits on a branch. Raises: HTTPStatusError: If the request was invalid Returns: The new branch protection rules Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: rules = await api.branches.update_protection_rules( "foo/bar", "baz", enforce_admins=True, ) """ api = f"/repos/{repo}/branches/{branch}/protection" data: Dict[str, Any] = { "enforce_admins": None, "required_status_checks": None, "required_pull_request_reviews": None, "restrictions": None, } if enforce_admins is not None: data["enforce_admins"] = enforce_admins if required_linear_history is not None: data["required_linear_history"] = required_linear_history if allow_force_pushes is not None: data["allow_force_pushes"] = allow_force_pushes if allow_deletions is not None: data["allow_deletions"] = allow_deletions if block_creations is not None: data["block_creations"] = block_creations if required_conversation_resolution is not None: data["required_conversation_resolution"] = ( required_conversation_resolution ) if lock_branch is not None: data["lock_branch"] = lock_branch if allow_fork_syncing is not None: data["allow_fork_syncing"] = allow_fork_syncing if require_branches_to_be_up_to_date is not None: status_checks = data.get("required_status_checks") or {} # checks must be set if strict is set status_checks["checks"] = [] status_checks["strict"] = require_branches_to_be_up_to_date data["required_status_checks"] = status_checks if required_status_checks is not None: status_checks = data.get("required_status_checks") or {} checks = [] for context, app_id in required_status_checks: check = {"context": context} if app_id: check["app_id"] = app_id checks.append(check) status_checks["checks"] = checks data["required_status_checks"] = status_checks if restrictions_users is not None: restrictions = data.get("restrictions") or {} # teams must be set too if users are set r_teams = restrictions.get("teams", []) restrictions["teams"] = r_teams restrictions["users"] = list(restrictions_users) data["restrictions"] = restrictions if restrictions_teams is not None: 
restrictions = data.get("restrictions") or {} # users must be set too if teams are set r_users = restrictions.get("users", []) restrictions["users"] = r_users restrictions["teams"] = list(restrictions_teams) data["restrictions"] = restrictions if restrictions_apps is not None: restrictions = data.get("restrictions") or {} restrictions["apps"] = list(restrictions_apps) data["restrictions"] = restrictions if dismiss_stale_reviews is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) required_pull_request_reviews["dismiss_stale_reviews"] = ( dismiss_stale_reviews ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if require_code_owner_reviews is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) required_pull_request_reviews["require_code_owner_reviews"] = ( require_code_owner_reviews ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if required_approving_review_count is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) required_pull_request_reviews["required_approving_review_count"] = ( required_approving_review_count ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if require_last_push_approval is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) required_pull_request_reviews["require_last_push_approval"] = ( require_last_push_approval ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if dismissal_restrictions_users is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) dismissal_restrictions = required_pull_request_reviews.get( "dismissal_restrictions", {} ) dismissal_restrictions["users"] = list(dismissal_restrictions_users) required_pull_request_reviews["dismissal_restrictions"] = ( dismissal_restrictions ) data["required_pull_request_reviews"] = ( 
required_pull_request_reviews ) if dismissal_restrictions_teams is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) dismissal_restrictions = required_pull_request_reviews.get( "dismissal_restrictions", {} ) dismissal_restrictions["teams"] = list(dismissal_restrictions_teams) required_pull_request_reviews["dismissal_restrictions"] = ( dismissal_restrictions ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if dismissal_restrictions_apps is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) dismissal_restrictions = required_pull_request_reviews.get( "dismissal_restrictions", {} ) dismissal_restrictions["apps"] = list(dismissal_restrictions_apps) required_pull_request_reviews["dismissal_restrictions"] = ( dismissal_restrictions ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if bypass_pull_request_allowances_users is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) bypass_pull_request_allowances = required_pull_request_reviews.get( "bypass_pull_request_allowances", {} ) bypass_pull_request_allowances["users"] = list( bypass_pull_request_allowances_users ) required_pull_request_reviews["bypass_pull_request_allowances"] = ( bypass_pull_request_allowances ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if bypass_pull_request_allowances_teams is not None: required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) bypass_pull_request_allowances = required_pull_request_reviews.get( "bypass_pull_request_allowances", {} ) bypass_pull_request_allowances["teams"] = list( bypass_pull_request_allowances_teams ) required_pull_request_reviews["bypass_pull_request_allowances"] = ( bypass_pull_request_allowances ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if bypass_pull_request_allowances_apps is not None: 
required_pull_request_reviews = ( data.get("required_pull_request_reviews") or {} ) bypass_pull_request_allowances = required_pull_request_reviews.get( "bypass_pull_request_allowances", {} ) bypass_pull_request_allowances["apps"] = list( bypass_pull_request_allowances_apps ) required_pull_request_reviews["bypass_pull_request_allowances"] = ( bypass_pull_request_allowances ) data["required_pull_request_reviews"] = ( required_pull_request_reviews ) if required_signatures is not None: data["required_signatures"] = required_signatures response = await self._client.put(api, data=data) response.raise_for_status() return BranchProtection.from_dict(response.json()) async def delete_protection_rules(self, repo: str, branch: str) -> None: """ Delete branch protection rules for a specific repository branch Args: repo: GitHub repository (owner/name) to use branch: Delete protection rules for this branch Raises: HTTPStatusError: If the request was invalid """ api = f"/repos/{repo}/branches/{branch}/protection" response = await self._client.delete(api) response.raise_for_status() async def set_enforce_admins( self, repo: str, branch: str, *, enforce_admins: bool ) -> None: """ Enable/disable enforce admin branch protection rule for a specific repository branch. Args: repo: GitHub repository (owner/name) to use branch: Delete protection rules for this branch enforce_admins: True to enable. False do disable. Raises: HTTPStatusError: If the request was invalid Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.branches.set_enforce_admins( "foo/bar", "baz", enforce_admins=True, ) """ api = f"/repos/{repo}/branches/{branch}/protection/enforce_admins" if enforce_admins: response = await self._client.post(api) else: response = await self._client.delete(api) response.raise_for_status() async def set_required_signatures( self, repo: str, branch: str, *, required_signatures: bool ) -> None: """ Enable/disable required signed commits for a repository branch. Args: repo: GitHub repository (owner/name) to use branch: Delete protection rules for this branch required_signature: True to enable. False do disable. Raises: HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.branches.set_required_signatures( "foo/bar", "baz", required_signatures=True, ) """ api = f"/repos/{repo}/branches/{branch}/protection/required_signatures" if required_signatures: response = await self._client.post(api) else: response = await self._client.delete(api) response.raise_for_status() async def update_required_status_checks( self, repo: str, branch: str, *, required_status_checks: Optional[Iterable[Tuple[str, str]]] = None, require_branches_to_be_up_to_date: Optional[bool] = None, ) -> None: """ Update required status check branch protection rules of a repository branch. Args: repo: GitHub repository (owner/name) to use branch: Delete protection rules for this branch required_status_checks: An iterable of status checks to require in order to merge into this branch. Contains tuples of the name of the required check and the ID of the GitHub App that must provide this check. Set this App ID to None to automatically select the GitHub App that has recently provided this check require_branches_to_be_up_to_date: Require branches to be up to date before merging. 
Raises: HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.branches.update_required_status_checks( "foo/bar", "baz", required_status_checks=[ ("Unittest", None), ("Linting", None), ], require_branches_to_be_up_to_date=True, ) """ api = ( f"/repos/{repo}/branches/{branch}/protection/required_status_checks" ) data: Dict[str, Any] = {} if require_branches_to_be_up_to_date is not None: data["strict"] = require_branches_to_be_up_to_date if required_status_checks is not None: checks = [] for context, app_id in required_status_checks: check = {"context": context} if app_id: check["app_id"] = app_id checks.append(check) data["checks"] = checks response = await self._client.patch(api, data=data) response.raise_for_status() async def remove_required_status_checks( self, repo: str, branch: str, ) -> None: """ Remove required status check branch protection rules of a repository branch. Args: repo: GitHub repository (owner/name) to use branch: Delete protection rules for this branch Raises: HTTPStatusError: If the request was invalid Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.branches.remove_required_status_checks( "foo/bar", "baz" ) """ api = ( f"/repos/{repo}/branches/{branch}/protection/required_status_checks" ) response = await self._client.delete(api) response.raise_for_status() pontos-25.3.2/pontos/github/api/client.py000066400000000000000000000216731476255566300203660ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from contextlib import AbstractAsyncContextManager from types import TracebackType from typing import ( Any, AsyncContextManager, AsyncIterator, Dict, Mapping, Optional, Type, Union, ) import httpx from pontos.github.api.helper import ( DEFAULT_GITHUB_API_URL, DEFAULT_TIMEOUT_CONFIG, JSON, JSON_OBJECT, _get_next_url, ) from pontos.github.models.base import GitHubModel Headers = Mapping[str, str] ParamValue = Union[str, None] Params = Mapping[str, ParamValue] # supported GitHub API version # https://docs.github.com/en/rest/overview/api-versions GITHUB_API_VERSION = "2022-11-28" DEFAULT_ACCEPT_HEADER = "application/vnd.github+json" ACCEPT_HEADER_OCTET_STREAM = "application/octet-stream" class GitHubAsyncRESTClient(AbstractAsyncContextManager): """ A client for calling the GitHub REST API asynchronously Should be used as an async context manager Example: .. 
code-block:: python org = "foo" async with GitHubAsyncRESTClient(token) as client: response = await client.get(f"/orgs/{org}/repos") """ def __init__( self, token: Optional[str] = None, url: Optional[str] = DEFAULT_GITHUB_API_URL, *, timeout: Optional[httpx.Timeout] = DEFAULT_TIMEOUT_CONFIG, ) -> None: self.token = token self.url = url self._client = httpx.AsyncClient(timeout=timeout, http2=True) def _request_headers( self, *, accept: Optional[str] = None, content_type: Optional[str] = None, content_length: Optional[int] = None, ) -> Headers: """ Get the default request headers """ headers: Dict[str, str] = { "Accept": accept or DEFAULT_ACCEPT_HEADER, "X-GitHub-Api-Version": GITHUB_API_VERSION, } if self.token: headers["Authorization"] = f"token {self.token}" if content_type: headers["Content-Type"] = content_type if content_length: headers["Content-Length"] = str(content_length) return headers def _request_kwargs( self, *, json: Optional[JSON] = None, content: Optional[Any] = None ) -> JSON: """ Get the default request arguments """ kwargs = {} if json is not None: kwargs["json"] = json if content is not None: kwargs["content"] = content return kwargs # type: ignore def _request_api_url(self, api: str) -> str: return f"{self.url}{api}" def _request_url(self, api_or_url: str) -> str: return ( api_or_url if api_or_url.startswith("https") else self._request_api_url(api_or_url) ) async def get( self, api: str, *, params: Optional[Params] = None, ) -> httpx.Response: """ Get request to a GitHub API Args: api: API path to use for the get request params: Optional params to use for the get request """ url = self._request_url(api) headers = self._request_headers() kwargs = self._request_kwargs() return await self._client.get( # type: ignore url, headers=headers, params=params, follow_redirects=True, **kwargs, ) async def get_all( self, api: str, *, params: Optional[Params] = None, ) -> AsyncIterator[httpx.Response]: """ Get paginated content of a get GitHub API request 
Args: api: API path to use for the get request params: Optional params to use for the get request """ response = await self.get(api, params=params) yield response next_url = _get_next_url(response) while next_url: # Workaround for https://github.com/encode/httpx/issues/3433 new_params = ( httpx.URL(next_url).params.merge(params) if params else None ) response = await self.get(next_url, params=new_params) yield response next_url = _get_next_url(response) async def delete( self, api: str, *, params: Optional[Params] = None ) -> httpx.Response: """ Delete request to a GitHub API Args: api: API path to use for the delete request params: Optional params to use for the delete request """ headers = self._request_headers() url = self._request_url(api) return await self._client.delete(url, params=params, headers=headers) async def post( self, api: str, *, data: Optional[JSON] = None, params: Optional[Params] = None, content: Optional[str] = None, content_type: Optional[str] = None, content_length: Optional[int] = None, ) -> httpx.Response: """ Post request to a GitHub API Args: api: API path to use for the post request params: Optional params to use for the post request data: Optional data to include in the post request """ headers = self._request_headers( content_type=content_type, content_length=content_length ) url = self._request_url(api) return await self._client.post( url, params=params, headers=headers, json=data, content=content ) async def put( self, api: str, *, data: Optional[JSON] = None, params: Optional[Params] = None, content: Optional[str] = None, content_type: Optional[str] = None, ) -> httpx.Response: """ Put request to a GitHub API Args: api: API path to use for the put request params: Optional params to use for the put request data: Optional data to include in the put request """ headers = self._request_headers(content_type=content_type) url = self._request_url(api) return await self._client.put( url, params=params, headers=headers, json=data, 
content=content ) async def patch( self, api: str, *, data: Optional[JSON] = None, params: Optional[Params] = None, content: Optional[str] = None, content_type: Optional[str] = None, ) -> httpx.Response: """ Patch request to a GitHub API Args: api: API path to use for the patch request params: Optional params to use for the patch request data: Optional data to include in the patch request """ headers = self._request_headers(content_type=content_type) url = self._request_url(api) return await self._client.patch( url, params=params, headers=headers, json=data, content=content ) def stream( self, api: str, *, accept: Optional[str] = None, ) -> AsyncContextManager[httpx.Response]: """ Stream data from a GitHub API Args: api: API path to use for the post request accept: Expected content type in the response. Default "application/octet-stream". """ headers = self._request_headers(accept=accept) url = self._request_url(api) return self._client.stream( "GET", url, headers=headers, follow_redirects=True ) async def __aenter__(self) -> "GitHubAsyncRESTClient": await self._client.__aenter__() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: return await self._client.__aexit__( # type: ignore exc_type, exc_value, traceback ) class GitHubAsyncREST: """ Base class for GitHub asynchronous REST classes """ def __init__(self, client: GitHubAsyncRESTClient): self._client = client async def _get_paged_items( self, api: str, name: str, model_cls: Type[GitHubModel], *, params: Optional[Params] = None, ) -> AsyncIterator[GitHubModel]: """ Internal method to get the paged items information from different REST URLs. 
""" request_params: dict[str, ParamValue] = {} if params: request_params.update(params) request_params["per_page"] = "100" # max number async for response in self._client.get_all(api, params=request_params): response.raise_for_status() data: JSON_OBJECT = response.json() for item in data.get(name, []): # type: ignore yield model_cls.from_dict(item) # type:ignore pontos-25.3.2/pontos/github/api/code_scanning.py000066400000000000000000000620161476255566300216760ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later import base64 import gzip from datetime import datetime from typing import AsyncIterator, Iterable, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.helper import JSON_OBJECT from pontos.github.models.base import SortOrder from pontos.github.models.code_scanning import ( AlertSort, AlertState, Analysis, CodeQLDatabase, CodeScanningAlert, DefaultSetup, DefaultSetupState, DismissedReason, Instance, Language, QuerySuite, SarifUploadInformation, Severity, ) from pontos.helper import enum_or_value class GitHubAsyncRESTCodeScanning(GitHubAsyncREST): async def _alerts( self, api: str, *, tool_name: Optional[str] = None, tool_guid: Optional[str] = "", severity: Union[Severity, str, None] = None, state: Union[AlertState, str, None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[CodeScanningAlert]: params: dict[str, Union[str, None]] = {"per_page": "100"} if tool_name: params["tool_name"] = tool_name if tool_guid or tool_guid is None: params["tool_guid"] = tool_guid if severity: params["severity"] = enum_or_value(severity) if state: params["state"] = enum_or_value(state) if sort: params["sort"] = enum_or_value(sort) if direction: params["direction"] = enum_or_value(direction) async for response in self._client.get_all(api, params=params): response.raise_for_status() for alert in 
response.json(): yield CodeScanningAlert.from_dict(alert) async def organization_alerts( self, organization: str, *, tool_name: Optional[str] = None, tool_guid: Optional[str] = "", severity: Union[Severity, str, None] = None, state: Union[AlertState, str, None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[CodeScanningAlert]: """ Get the list of code scanning alerts for all repositories of a GitHub organization https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-an-organization Args: organization: Name of the organization tool_name: The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either tool_name or tool_guid, but not both. tool_guid: The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either tool_guid or tool_name, but not both severity: If specified, only code scanning alerts with this severity will be returned state: Filter alerts by state resolutions sort: The property by which to sort the results. Default is to sort alerts by creation date. direction: The direction to sort the results by. Default is desc. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the code scanning alerts Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.code_scanning.organization_alerts( "my-org" ): print(alert) """ api = f"/orgs/{organization}/code-scanning/alerts" async for alert in self._alerts( api, state=state, severity=severity, tool_guid=tool_guid, tool_name=tool_name, sort=sort, direction=direction, ): yield alert async def alerts( self, repo: str, *, tool_name: Optional[str] = None, tool_guid: Optional[str] = "", severity: Union[Severity, str, None] = None, state: Union[AlertState, str, None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[CodeScanningAlert]: """ Get the list of code scanning alerts for a repository https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-alerts-for-a-repository Args: repo: GitHub repository (owner/name) Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the code scanning alerts Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.code_scanning.alerts( "org/repo" ): print(alert) """ api = f"/repos/{repo}/code-scanning/alerts" async for alert in self._alerts( api, state=state, severity=severity, tool_guid=tool_guid, tool_name=tool_name, sort=sort, direction=direction, ): yield alert async def alert( self, repo: str, alert_number: Union[str, int], ) -> CodeScanningAlert: """ Get a single code scanning alert https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a code scanning alert in its repository Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Code scanning alert information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: alert = await api.code_scanning.alert("foo/bar", 123) """ api = f"/repos/{repo}/code-scanning/alerts/{alert_number}" response = await self._client.get(api) response.raise_for_status() return CodeScanningAlert.from_dict(response.json()) async def update_alert( self, repo: str, alert_number: Union[str, int], state: Union[AlertState, str], *, dismissed_reason: Union[DismissedReason, str, None] = None, dismissed_comment: Optional[str] = None, ) -> CodeScanningAlert: """ Update a single code scanning alert https://docs.github.com/en/rest/code-scanning/code-scanning#update-a-code-scanning-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a code scanning alert in its repository state: The state of the alert dismissed_reason: The reason for dismissing or closing the alert dismissed_comment: The dismissal comment associated with the dismissal of the alert. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Code scanning alert information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.code_scanning import ( AlertState, DismissedReason, ) async with GitHubAsyncRESTApi(token) as api: alert = await api.code_scanning.update_alert( "foo/bar", 123, AlertState.DISMISSED, dismissed_reason=DismissedReason.WONT_FIX, dismissed_comment="Not applicable", ) """ api = f"/repos/{repo}/code-scanning/alerts/{alert_number}" data = {"state": enum_or_value(state)} if dismissed_reason: data["dismissed_reason"] = enum_or_value(dismissed_reason) if dismissed_comment: data["dismissed_comment"] = dismissed_comment response = await self._client.patch(api, data=data) response.raise_for_status() return CodeScanningAlert.from_dict(response.json()) async def instances( self, repo: str, alert_number: Union[str, int], *, ref: Optional[str] = None, ) -> AsyncIterator[Instance]: """ Lists all instances of the specified code scanning alert https://docs.github.com/en/rest/code-scanning/code-scanning#list-instances-of-a-code-scanning-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a code scanning alert in its repository ref: The Git reference for the results you want to list. The ref for a branch can be formatted either as refs/heads/ or simply . To reference a pull request use refs/pull//merge. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the code scanning alert instances Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for instance in api.code_scanning.instances( "org/repo", 1 ): print(instance) """ api = f"/repos/{repo}/code-scanning/alerts/{alert_number}/instances" params = {"per_page": "100"} if ref: params["ref"] = ref async for response in self._client.get_all(api, params=params): for alert in response.json(): yield Instance.from_dict(alert) async def analyses( self, repo: str, *, tool_name: Optional[str] = None, tool_guid: Optional[str] = "", ref: Optional[str] = None, sarif_id: Optional[str] = None, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[Analysis]: """ Lists the details of all code scanning analyses for a repository, starting with the most recent. https://docs.github.com/en/rest/code-scanning/code-scanning#list-code-scanning-analyses-for-a-repository Args: repo: GitHub repository (owner/name) tool_name: The name of a code scanning tool. Only results by this tool will be listed. You can specify the tool by using either tool_name or tool_guid, but not both. tool_guid: The GUID of a code scanning tool. Only results by this tool will be listed. Note that some code scanning tools may not include a GUID in their analysis data. You can specify the tool by using either tool_guid or tool_name, but not both ref: The Git reference for the analyses you want to list. The ref for a branch can be formatted either as refs/heads/ or simply . To reference a pull request use refs/pull//merge. sarif_id: Filter analyses belonging to the same SARIF upload Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the code scanning alert analysis data Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for data in api.code_scanning.analyses( "org/repo" ): print(data) """ api = f"/repos/{repo}/code-scanning/analyses" params: dict[str, Union[str, None]] = {"per_page": "100"} if tool_name: params["tool_name"] = tool_name if tool_guid or tool_guid is None: params["tool_guid"] = tool_guid if ref: params["ref"] = ref if sarif_id: params["sarif_id"] = sarif_id if direction: params["direction"] = enum_or_value(direction) async for response in self._client.get_all(api, params=params): response.raise_for_status() for alert in response.json(): yield Analysis.from_dict(alert) async def analysis( self, repo: str, analysis_id: Union[int, str], ) -> Analysis: """ Gets a specified code scanning analysis for a repository https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-analysis-for-a-repository Args: repo: GitHub repository (owner/name) analysis_id: The ID of the analysis Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Code scanning alert analysis data Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: analysis = await api.code_scanning.analysis( "org/repo", 123 ) print(analysis) """ api = f"/repos/{repo}/code-scanning/analyses/{analysis_id}" response = await self._client.get(api) response.raise_for_status() return Analysis.from_dict(response.json()) async def delete_analysis( self, repo: str, analysis_id: Union[int, str], ) -> dict[str, str]: """ Delete a specified code scanning analysis from a repository https://docs.github.com/en/rest/code-scanning/code-scanning#delete-a-code-scanning-analysis-from-a-repository Args: repo: GitHub repository (owner/name) analysis_id: The ID of the analysis Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. 
Returns: See the GitHub documentation for the response object Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.code_scanning.delete( "org/repo", 123 ) """ api = f"/repos/{repo}/code-scanning/analyses/{analysis_id}" response = await self._client.delete(api) response.raise_for_status() return response.json() async def codeql_databases( self, repo: str, ) -> AsyncIterator[CodeQLDatabase]: """ List the CodeQL databases that are available in a repository. https://docs.github.com/en/rest/code-scanning/code-scanning#list-codeql-databases-for-a-repository Args: repo: GitHub repository (owner/name) Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the code scanning codeql database information Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for database in api.code_scanning.codeql_databases( "org/repo" ): print(database) """ api = f"/repos/{repo}/code-scanning/codeql/databases" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for alert in response.json(): yield CodeQLDatabase.from_dict(alert) async def codeql_database( self, repo: str, language: str, ) -> CodeQLDatabase: """ Get a CodeQL database for a language in a repository https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-codeql-database-for-a-repository Args: repo: GitHub repository (owner/name) language: The language of the CodeQL database Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Code scanning CodeQL database information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: db = await api.code_scanning.codeql_database( "org/repo", "java" ) print(db) """ api = f"/repos/{repo}/code-scanning/codeql/databases/{language}" response = await self._client.get(api) response.raise_for_status() return CodeQLDatabase.from_dict(response.json()) async def default_setup( self, repo: str, ) -> DefaultSetup: """ Gets a code scanning default setup configuration https://docs.github.com/en/rest/code-scanning/code-scanning#get-a-code-scanning-default-setup-configuration Args: repo: GitHub repository (owner/name) Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Code scanning default setup Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: setup = await api.code_scanning.default_setup( "org/repo" ) print(setup) """ api = f"/repos/{repo}/code-scanning/default-setup" response = await self._client.get(api) response.raise_for_status() return DefaultSetup.from_dict(response.json()) async def update_default_setup( self, repo: str, state: Union[str, DefaultSetupState], query_suite: Union[str, QuerySuite], languages: Iterable[Union[str, Language]], ) -> dict[str, str]: """ Updates a code scanning default setup configuration https://docs.github.com/en/rest/code-scanning/code-scanning#update-a-code-scanning-default-setup-configuration Args: repo: GitHub repository (owner/name) state: Whether code scanning default setup has been configured or not query_suite: CodeQL query suite to be used languages: CodeQL languages to be analyzed Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: See the GitHub documentation for the response object Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.code_scanning import ( DefaultSetupState, Language, QuerySuite, ) async with GitHubAsyncRESTApi(token) as api: await api.code_scanning.update_default_setup( "org/repo", state=DefaultSetupState.CONFIGURED, query_suite=QuerySuite.EXTENDED, languages=[Language.PYTHON, Language.JAVASCRIPT] ) """ api = f"/repos/{repo}/code-scanning/code-scanning/default-setup" data = { "state": enum_or_value(state), "query_suite": enum_or_value(query_suite), "languages": [enum_or_value(value) for value in languages], } response = await self._client.patch(api, data=data) response.raise_for_status() return response.json() async def upload_sarif_data( self, repo: str, commit_sha: str, ref: str, sarif: bytes, *, checkout_uri: Optional[str] = None, started_at: Optional[datetime] = None, tool_name: Optional[str] = None, validate: Optional[bool] = None, ) -> dict[str, str]: """ Upload SARIF data containing the results of a code scanning analysis to make the results available in a repository https://docs.github.com/en/rest/code-scanning/code-scanning#upload-an-analysis-as-sarif-data Args: repo: GitHub repository (owner/name) commit_sha: The SHA of the commit to which the analysis you are uploading relates ref: The full Git reference, formatted as refs/heads/, refs/pull//merge, or refs/pull//head sarif: checkout_uri: The base directory used in the analysis, as it appears in the SARIF file started_at: The time that the analysis run began tool_name: The name of the tool used to generate the code scanning analysis validate: Whether the SARIF file will be validated according to the code scanning specifications Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: See the GitHub documentation for the response object Example: .. 
code-block:: python from pathlib import Path from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: json = await api.code_scanning.upload_sarif_data( "org/repo", commit_sha="4b6472266afd7b471e86085a6659e8c7f2b119da", ref="refs/heads/main", sarif=Path("/path/to/sarif.file").read_bytes(), ) print(json["id"]) """ api = f"/repos/{repo}/code-scanning/sarifs" data: JSON_OBJECT = { "commit_sha": commit_sha, "ref": ref, } if checkout_uri: data["checkout_uri"] = checkout_uri if started_at: data["started_at"] = started_at.isoformat(timespec="seconds") if tool_name: data["tool_name"] = tool_name if validate is not None: data["validate"] = validate compressed = gzip.compress(sarif, mtime=0) encoded = base64.b64encode(compressed).decode(encoding="ascii") data["sarif"] = encoded response = await self._client.post(api, data=data) response.raise_for_status() return response.json() async def sarif(self, repo: str, sarif_id: str) -> SarifUploadInformation: """ Gets information about a SARIF upload, including the status and the URL of the analysis that was uploaded so that you can retrieve details of the analysis https://docs.github.com/en/rest/code-scanning/code-scanning#get-information-about-a-sarif-upload Args: repo: GitHub repository (owner/name) sarif_id: The SARIF ID obtained after uploading Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Information about the SARIF upload Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: sarif = await api.code_scanning.sarif( "org/repo", "47177e22-5596-11eb-80a1-c1e54ef945c6", ) print(sarif) """ api = f"/repos/{repo}/code-scanning/sarifs/{sarif_id}" response = await self._client.get(api) response.raise_for_status() return SarifUploadInformation.from_dict(response.json()) pontos-25.3.2/pontos/github/api/contents.py000066400000000000000000000023111476255566300207310ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import Optional from pontos.github.api.client import GitHubAsyncREST class GitHubAsyncRESTContent(GitHubAsyncREST): async def path_exists( self, repo: str, path: str, *, branch: Optional[str] = None ) -> bool: """ Check if a path (file or directory) exists in a branch of a repository Args: repo: GitHub repository (owner/name) to use path: to the file/directory in question branch: Branch to check, defaults to default branch (: Returns: True if existing, False else Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: exists = await api.contents.path_exists( "foo/bar", "src/utils.py" ) """ api = f"/repos/{repo}/contents/{path}" params = {} if branch: params["ref"] = branch response = await self._client.get(api, params=params) return response.is_success pontos-25.3.2/pontos/github/api/dependabot.py000066400000000000000000000256171476255566300212170ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from typing import AsyncIterator, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.models.base import SortOrder from pontos.github.models.dependabot import ( AlertSort, AlertState, DependabotAlert, DependencyScope, DismissedReason, Severity, ) from pontos.helper import enum_or_value class GitHubAsyncRESTDependabot(GitHubAsyncREST): async def _alerts( self, api: str, *, state: Union[AlertState, str, None] = None, severity: Union[Severity, str, None] = None, ecosystem: Optional[str] = None, packages: Optional[list[str]] = None, scope: Union[DependencyScope, str, None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[DependabotAlert]: params = {"per_page": "100"} if state: params["state"] = enum_or_value(state) if severity: params["severity"] = enum_or_value(severity) if ecosystem: params["ecosystem"] = ecosystem if packages: # as per REST api docu this param is passed as package (singular!) 
params["package"] = ",".join(packages) if scope: params["scope"] = enum_or_value(scope) if sort: params["sort"] = enum_or_value(sort) if direction: params["direction"] = enum_or_value(direction) async for response in self._client.get_all(api, params=params): response.raise_for_status() for alert in response.json(): yield DependabotAlert.from_dict(alert) async def enterprise_alerts( self, enterprise: str, *, state: Union[AlertState, str, None] = None, severity: Union[Severity, str, None] = None, ecosystem: Optional[str] = None, packages: Optional[list[str]] = None, scope: Union[DependencyScope, str, None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[DependabotAlert]: """ Get the list of dependabot alerts for all repositories of a GitHub enterprise https://docs.github.com/en/rest/dependabot/alerts#list-dependabot-alerts-for-an-enterprise Args: enterprise: Name of the enterprise state: Filter alerts by state severity: Filter alerts by severity ecosystem: Filter alerts by package ecosystem package: Return alerts only for the provided packages scope: Filter alerts by scope of the vulnerable dependency sort: The property by which to sort the results. Default is to sort alerts by creation date. direction: The direction to sort the results by. Default is desc. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the dependabot alerts Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.dependabot.enterprise_alerts( "my-enterprise" ): print(alert) """ api = f"/enterprises/{enterprise}/dependabot/alerts" async for alert in self._alerts( api, state=state, severity=severity, ecosystem=ecosystem, packages=packages, scope=scope, sort=sort, direction=direction, ): yield alert async def organization_alerts( self, organization: str, *, state: Union[AlertState, str, None] = None, severity: Union[Severity, str, None] = None, ecosystem: Optional[str] = None, packages: Optional[list[str]] = None, scope: Union[DependencyScope, str, None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[DependabotAlert]: """ Get the list of dependabot alerts for all repositories of a GitHub organization https://docs.github.com/en/rest/dependabot/alerts#list-dependabot-alerts-for-an-organization Args: organization: Name of the organization state: Filter alerts by state severity: Filter alerts by severity ecosystem: Filter alerts by package ecosystem package: Return alerts only for the provided packages scope: Filter alerts by scope of the vulnerable dependency sort: The property by which to sort the results. Default is to sort alerts by creation date. direction: The direction to sort the results by. Default is desc. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the dependabot alerts Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.dependabot.organization_alerts( "my-enterprise" ): print(alert) """ api = f"/orgs/{organization}/dependabot/alerts" async for alert in self._alerts( api, state=state, severity=severity, ecosystem=ecosystem, packages=packages, scope=scope, sort=sort, direction=direction, ): yield alert async def alerts( self, repo: str, *, state: Union[AlertState, str, None] = None, severity: Union[Severity, str, None] = None, ecosystem: Optional[str] = None, packages: Optional[list[str]] = None, scope: Union[DependencyScope, str, None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[DependabotAlert]: """ Get the list of dependabot alerts for a repository https://docs.github.com/en/rest/dependabot/alerts#list-dependabot-alerts-for-a-repository Args: repo: GitHub repository (owner/name) state: Filter alerts by state severity: Filter alerts by severity ecosystem: Filter alerts by package ecosystem package: Return alerts only for the provided packages scope: Filter alerts by scope of the vulnerable dependency sort: The property by which to sort the results. Default is to sort alerts by creation date. direction: The direction to sort the results by. Default is desc. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the dependabot alerts Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.dependabot.alerts( "my-enterprise" ): print(alert) """ api = f"/repos/{repo}/dependabot/alerts" async for alert in self._alerts( api, state=state, severity=severity, ecosystem=ecosystem, packages=packages, scope=scope, sort=sort, direction=direction, ): yield alert async def alert( self, repo: str, alert_number: Union[str, int], ) -> DependabotAlert: """ Get a single dependabot alert https://docs.github.com/en/rest/dependabot/alerts#get-a-dependabot-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a Dependabot alert in its repository Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Dependabot alert information Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: alert = await api.dependabot.alert("foo/bar", 123) """ api = f"/repos/{repo}/dependabot/alerts/{alert_number}" response = await self._client.get(api) response.raise_for_status() return DependabotAlert.from_dict(response.json()) async def update_alert( self, repo: str, alert_number: Union[str, int], state: Union[AlertState, str], *, dismissed_reason: Union[DismissedReason, str, None] = None, dismissed_comment: str, ) -> DependabotAlert: """ Update a single dependabot alert https://docs.github.com/en/rest/dependabot/alerts#update-a-dependabot-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a Dependabot alert in its repository state: The state of the Dependabot alert dismissed_reason: Required when state is dismissed. A reason for dismissing the alert. dismissed_comment: An optional comment associated with dismissing the alert Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Dependabot alert information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: alert = await api.dependabot.update( "foo/bar", 123, AlertState.FIXED, ) """ api = f"/repos/{repo}/dependabot/alerts/{alert_number}" data = {"state": enum_or_value(state)} if dismissed_comment: data["dismissed_comment"] = dismissed_comment if dismissed_reason: data["dismissed_reason"] = enum_or_value(dismissed_reason) response = await self._client.patch(api, data=data) response.raise_for_status() return DependabotAlert.from_dict(response.json()) pontos-25.3.2/pontos/github/api/errors.py000066400000000000000000000003421476255566300204120ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from pontos.errors import PontosError class GitHubApiError(PontosError): """ Error while using the GitHub API """ pontos-25.3.2/pontos/github/api/foo.json000066400000000000000000000151471476255566300202130ustar00rootroot00000000000000{ 'id': 4917607, 'name': 'asset-management-backend', 'package_type': 'container', 'owner': { 'login': 'greenbone', 'id': 31986857, 'node_id': 'MDEyOk9yZ2FuaXphdGlvbjMxOTg2ODU3', 'avatar_url': 'https: //avatars.githubusercontent.com/u/31986857?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/greenbone', 'html_url': 'https://github.com/greenbone', 'followers_url': 'https://api.github.com/users/greenbone/followers', 'following_url': 'https://api.github.com/users/greenbone/following{/other_user}', 'gists_url': 'https://api.github.com/users/greenbone/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/greenbone/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/greenbone/subscriptions', 'organizations_url': 'https://api.github.com/users/greenbone/orgs', 'repos_url': 'https://api.github.com/users/greenbone/repos', 'events_url': 'https://api.github.com/users/greenbone/events{/privacy}', 'received_events_url': 
'https://api.github.com/users/greenbone/received_events', 'type': 'Organization', 'site_admin': False }, 'version_count': 7686, 'visibility': 'public', 'url': 'https://api.github.com/orgs/greenbone/packages/container/asset-management-backend', 'created_at': '2023-03-16T14:39:23Z', 'updated_at': '2024-05-06T07:46:34Z', 'repository': { 'id': 498688333, 'node_id': 'R_kgDOHblhTQ', 'name': 'asset-management-backend', 'full_name': 'greenbone/asset-management-backend', 'private': True, 'owner': {'login': 'greenbone', 'id': 31986857, 'node_id': 'MDEyOk9yZ2FuaXphdGlvbjMxOTg2ODU3', 'avatar_url': 'https://avatars.githubusercontent.com/u/31986857?v=4', 'gravatar_id': '', 'url': 'https://api.github.com/users/greenbone', 'html_url': 'https://github.com/greenbone', 'followers_url': 'https://api.github.com/users/greenbone/followers', 'following_url': 'https://api.github.com/users/greenbone/following{/other_user}', 'gists_url': 'https://api.github.com/users/greenbone/gists{/gist_id}', 'starred_url': 'https://api.github.com/users/greenbone/starred{/owner}{/repo}', 'subscriptions_url': 'https://api.github.com/users/greenbone/subscriptions', 'organizations_url': 'https://api.github.com/users/greenbone/orgs', 'repos_url': 'https://api.github.com/users/greenbone/repos', 'events_url': 'https://api.github.com/users/greenbone/events{/privacy}', 'received_events_url': 'https://api.github.com/users/greenbone/received_events', 'type': 'Organization', 'site_admin': False }, 'html_url': 'https://github.com/greenbone/asset-management-backend', 'description': 'Backend for the asset-management-frontend that provides REST endpoints with the main functionality of the Opensight Asset product.', 'fork': False, 'url': 'https://api.github.com/repos/greenbone/asset-management-backend', 'forks_url': 'https://api.github.com/repos/greenbone/asset-management-backend/forks', 'keys_url': 'https://api.github.com/repos/greenbone/asset-management-backend/keys{/key_id}', 'collaborators_url': 
'https://api.github.com/repos/greenbone/asset-management-backend/collaborators{/collaborator}', 'teams_url': 'https://api.github.com/repos/greenbone/asset-management-backend/teams', 'hooks_url': 'https://api.github.com/repos/greenbone/asset-management-backend/hooks', 'issue_events_url': 'https://api.github.com/repos/greenbone/asset-management-backend/issues/events{/number}', 'events_url': 'https://api.github.com/repos/greenbone/asset-management-backend/events', 'assignees_url': 'https://api.github.com/repos/greenbone/asset-management-backend/assignees{/user}', 'branches_url': 'https://api.github.com/repos/greenbone/asset-management-backend/branches{/branch}', 'tags_url': 'https://api.github.com/repos/greenbone/asset-management-backend/tags', 'blobs_url': 'https://api.github.com/repos/greenbone/asset-management-backend/git/blobs{/sha}', 'git_tags_url': 'https://api.github.com/repos/greenbone/asset-management-backend/git/tags{/sha}', 'git_refs_url': 'https://api.github.com/repos/greenbone/asset-management-backend/git/refs{/sha}', 'trees_url': 'https://api.github.com/repos/greenbone/asset-management-backend/git/trees{/sha}', 'statuses_url': 'https://api.github.com/repos/greenbone/asset-management-backend/statuses/{sha}', 'languages_url': 'https://api.github.com/repos/greenbone/asset-management-backend/languages', 'stargazers_url': 'https://api.github.com/repos/greenbone/asset-management-backend/stargazers', 'contributors_url': 'https://api.github.com/repos/greenbone/asset-management-backend/contributors', 'subscribers_url': 'https://api.github.com/repos/greenbone/asset-management-backend/subscribers', 'subscription_url': 'https://api.github.com/repos/greenbone/asset-management-backend/subscription', 'commits_url': 'https://api.github.com/repos/greenbone/asset-management-backend/commits{/sha}', 'git_commits_url': 'https://api.github.com/repos/greenbone/asset-management-backend/git/commits{/sha}', 'comments_url': 
'https://api.github.com/repos/greenbone/asset-management-backend/comments{/number}', 'issue_comment_url': 'https://api.github.com/repos/greenbone/asset-management-backend/issues/comments{/number}', 'contents_url': 'https://api.github.com/repos/greenbone/asset-management-backend/contents/{+path}', 'compare_url': 'https://api.github.com/repos/greenbone/asset-management-backend/compare/{base}...{head}', 'merges_url': 'https://api.github.com/repos/greenbone/asset-management-backend/merges', 'archive_url': 'https://api.github.com/repos/greenbone/asset-management-backend/{archive_format}{/ref}', 'downloads_url': 'https://api.github.com/repos/greenbone/asset-management-backend/downloads', 'issues_url': 'https://api.github.com/repos/greenbone/asset-management-backend/issues{/number}', 'pulls_url': 'https://api.github.com/repos/greenbone/asset-management-backend/pulls{/number}', 'milestones_url': 'https://api.github.com/repos/greenbone/asset-management-backend/milestones{/number}', 'notifications_url': 'https://api.github.com/repos/greenbone/asset-management-backend/notifications{?since,all,participating}', 'labels_url': 'https://api.github.com/repos/greenbone/asset-management-backend/labels{/name}', 'releases_url': 'https://api.github.com/repos/greenbone/asset-management-backend/releases{/id}', 'deployments_url': 'https://api.github.com/repos/greenbone/asset-management-backend/deployments' }, 'html_url': 'https://github.com/orgs/greenbone/packages/container/package/asset-management-backend' }pontos-25.3.2/pontos/github/api/helper.py000066400000000000000000000012751476255566300203630ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import Dict, List, Optional, Union import httpx DEFAULT_GITHUB_API_URL = "https://api.github.com" DEFAULT_TIMEOUT = 180.0 # three minutes DEFAULT_TIMEOUT_CONFIG = httpx.Timeout(DEFAULT_TIMEOUT) # three minutes JSON_OBJECT = Dict[str, Union[str, bool, int]] # 
pylint: disable=invalid-name JSON = Union[List[JSON_OBJECT], JSON_OBJECT] # pylint: disable=invalid-name def _get_next_url(response: httpx.Response) -> Optional[str]: if response and response.links: try: return response.links["next"]["url"] except KeyError: pass return None pontos-25.3.2/pontos/github/api/labels.py000066400000000000000000000053651476255566300203520ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import AsyncIterator, Iterable, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.helper import JSON class GitHubAsyncRESTLabels(GitHubAsyncREST): async def get_all( self, repo: str, issue: Union[int, str], ) -> AsyncIterator[str]: """ Get all labels that are set in the issue/pr Args: repo: GitHub repository (owner/name) to use issue: Issue/Pull request number Returns: An async iterator yielding the labels Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for label in api.labels.get_all("foo/bar", 123): print(label) """ api = f"/repos/{repo}/issues/{issue}/labels" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() data: JSON = response.json() for label in data: yield label["name"] # type: ignore async def delete_all(self, repo: str, issue: Union[int, str]) -> None: """ Deletes all labels in the issue/pr. Args: repo: GitHub repository (owner/name) to use issue: Issue/Pull request number Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.labels.delete_all("foo/bar", 123) """ api = f"/repos/{repo}/issues/{issue}/labels" response = await self._client.delete(api) response.raise_for_status() async def set_all( self, repo: str, issue: Union[int, str], labels: Iterable[str] ) -> None: """ Set labels in the issue/pr. Args: repo: GitHub repository (owner/name) to use issue: Issue/Pull request number labels: Iterable of labels, that should be set. Existing labels will be overwritten. Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.labels.set_all("foo/bar", 123, ["bug", "doc"]) """ api = f"/repos/{repo}/issues/{issue}/labels" data: JSON = {"labels": labels} # type: ignore response = await self._client.post(api, data=data) response.raise_for_status() pontos-25.3.2/pontos/github/api/organizations.py000066400000000000000000000227341476255566300217760ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import AsyncIterator, Iterable, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.errors import GitHubApiError from pontos.github.models.base import User from pontos.github.models.organization import ( InvitationRole, MemberFilter, MemberRole, Repository, RepositoryType, ) from pontos.helper import enum_or_value class GitHubAsyncRESTOrganizations(GitHubAsyncREST): async def exists(self, organization: str) -> bool: """ Check if an organization exists Args: repo: GitHub repository (owner/name) to use Returns: True if the organization exists Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: exists = api.organizations.exists("foo") """ api = f"/orgs/{organization}" response = await self._client.get(api) return response.is_success async def get_repositories( self, organization: str, *, repository_type: Union[RepositoryType, str] = RepositoryType.ALL, ) -> AsyncIterator[Repository]: """ Get information about organization repositories https://docs.github.com/en/rest/repos/repos#list-organization-repositories Args: organization: GitHub organization to use repository_type: Only list repositories of this type. Raises: `httpx.HTTPStatusError`: If there was an error in the request Return: An async iterator yielding the repositories Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for repo in api.organizations.get_repositories( "foo" ): print(repo) """ api = f"/orgs/{organization}/repos" params = {"type": enum_or_value(repository_type), "per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for repo in response.json(): yield Repository.from_dict(repo) async def members( self, organization: str, *, member_filter: Union[MemberFilter, str] = MemberFilter.ALL, role: Union[MemberRole, str] = MemberRole.ALL, ) -> AsyncIterator[User]: """ Get information about organization members https://docs.github.com/en/rest/orgs/members#list-organization-members Args: organization: GitHub organization to use member_filter: Include only members of this kind. role: Filter members by their role. Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding users Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for user in api.organizations.members( "foo" ): print(user) """ api = f"/orgs/{organization}/members" params = { "filter": enum_or_value(member_filter), "role": enum_or_value(role), "per_page": "100", } async for response in self._client.get_all(api, params=params): response.raise_for_status() for member in response.json(): yield User.from_dict(member) async def invite( self, organization: str, *, email: Optional[str] = None, invitee_id: Optional[Union[str, int]] = None, role: Union[InvitationRole, str] = InvitationRole.DIRECT_MEMBER, team_ids: Optional[Iterable[Union[str, int]]] = None, ) -> None: """ Invite a user to a GitHub Organization https://docs.github.com/en/rest/orgs/members#create-an-organization-invitation Args: organization: GitHub organization to use email: Email address of the person you are inviting, which can be an existing GitHub user. Either an email or an invitee_id must be given. invitee_id: GitHub user ID for the person you are inviting. Either an email or an invitee_id must be given. role: The role for the new member. admin - Organization owners with full administrative rights to the organization and complete access to all repositories and teams. direct_member - Non-owner organization members with ability to see other members and join teams by invitation. billing_manager - Non-owner organization members with ability to manage the billing settings of your organization. team_ids: Specify IDs for the teams you want to invite new members to. Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.organizations.invite("foo", email="john@doe.com") """ if not email and not invitee_id: raise GitHubApiError("Either email or invitee_id must be provided") api = f"/orgs/{organization}/invitations" data = {"role": enum_or_value(role)} if team_ids: data["team_ids"] = list(team_ids) if invitee_id: data["invitee_id"] = invitee_id else: data["email"] = email response = await self._client.post(api, data=data) response.raise_for_status() async def remove_member( self, organization: str, username: str, ) -> None: """ Remove a member from a GitHub Organization https://docs.github.com/en/rest/orgs/members#remove-organization-membership-for-a-user Args: organization: GitHub organization to use username: The handle for the GitHub user account to remove from the organization. Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.organizations.remove_member("foo", "a_user") """ api = f"/orgs/{organization}/memberships/{username}" response = await self._client.delete(api) response.raise_for_status() async def outside_collaborators( self, organization: str, *, member_filter: Union[MemberFilter, str] = MemberFilter.ALL, ) -> AsyncIterator[User]: """ Get information about outside collaborators of an organization https://docs.github.com/en/rest/orgs/outside-collaborators#list-outside-collaborators-for-an-organization Args: organization: GitHub organization to use member_filter: Filter the list of outside collaborators. Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding users Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for user in api.organizations.outside_collaborators( "foo" ): print(user) """ api = f"/orgs/{organization}/outside_collaborators" params = { "filter": enum_or_value(member_filter), "per_page": "100", } async for response in self._client.get_all(api, params=params): response.raise_for_status() for member in response.json(): yield User.from_dict(member) async def remove_outside_collaborator( self, organization: str, username: str, ) -> None: """ Remove an outside collaborator from a GitHub Organization https://docs.github.com/en/rest/orgs/outside-collaborators#remove-outside-collaborator-from-an-organization Args: organization: GitHub organization to use username: The handle for the GitHub user account to remove from the organization. Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.organizations.remove_outside_collaborator( "foo", "a_user" ) """ api = f"/orgs/{organization}/outside_collaborators/{username}" response = await self._client.delete(api) response.raise_for_status() pontos-25.3.2/pontos/github/api/packages.py000066400000000000000000000274751476255566300206740ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import AsyncIterator from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.errors import GitHubApiError from pontos.github.models.packages import Package, PackageType, PackageVersion class GitHubAsyncRESTPackages(GitHubAsyncREST): async def exists( self, organization: str, package_type: PackageType, package_name: str ) -> bool: """ Check if a package exists Args: package: GitHub package (owner/name) to use Returns: True if the package exists Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: exists = api.packages.exists("foo") """ api = f"/orgs/{organization}/packages/{package_type}/{package_name}" response = await self._client.get(api) return response.is_success async def package( self, organization: str, package_type: PackageType, package_name: str ) -> Package: """ Get information about a package https://docs.github.com/en/rest/reference/packages#get-a-package-for-an-organization Args: organization: GitHub organization to use package_type: Type of the package to get package_name: Name of the package to get Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: Package information Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: package = api.packages.package( organization="foo", package_type="container", package_name="bar", ): print(package.name) """ api = f"/orgs/{organization}/packages/{package_type}/{package_name}" response = await self._client.get(api) response.raise_for_status() return Package.from_dict(response.json()) async def packages( self, organization: str, package_type: str ) -> AsyncIterator[Package]: """ Get information about organization packages https://docs.github.com/en/rest/reference/packages#list-packages-for-an-organization Args: organization: GitHub organization to use package_type: Type of the packages to list Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding packages information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for package in api.packages.packages( organization="foo", package_type="container", ): print(package) """ api = f"/orgs/{organization}/packages/{package_type}" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for package in response.json(): yield Package.from_dict(package) async def package_version( self, organization: str, package_type: PackageType, package_name: str, version: int, ) -> PackageVersion: """ Get information about a package version https://docs.github.com/en/rest/reference/packages#get-a-package-version-for-an-organization Args: organization: GitHub organization to use package_type: Type of the package to get package_name: Name of the package to get version: Version of the package to get Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: Package version information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: package = api.packages.package_version( organization="foo", package_type="container", package_name="bar", version=1, ): print(package.version) """ api = f"/orgs/{organization}/packages/{package_type}/{package_name}/versions/{version}" response = await self._client.get(api) if not response.is_success: raise GitHubApiError(response) return PackageVersion.from_dict(response.json()) async def package_versions( self, organization: str, package_type: PackageType, package_name: str ) -> AsyncIterator[PackageVersion]: """ Get information about package versions https://docs.github.com/en/rest/reference/packages#list-package-versions-for-an-organization Args: organization: GitHub organization to use package_type: Type of the package to get package_name: Name of the package to get Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding package versions Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for package in api.packages.package_versions( organization="foo", package_type="container", package_name="bar", ): print(package) """ api = f"/orgs/{organization}/packages/{package_type}/{package_name}/versions" async for response in self._client.get_all(api): response.raise_for_status() versions = response.json() if versions: for version in versions: yield PackageVersion.from_dict(version) async def package_version_tags( self, organization: str, package_type: PackageType, package_name: str, version: int, ) -> list[str]: """ Get information about package version tags Uses https://docs.github.com/en/rest/reference/packages#get-a-package-version-for-an-organization and only returns the tags Args: organization: GitHub organization to use package_type: Type of the package to get package_name: Name of the package to get version: Version of the package to get Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: List of tags for the package version Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: tags = api.packages.package_version_tags( organization="foo", package_type="container", package_name="bar", version=1, ) print(tags) """ api = f"/orgs/{organization}/packages/{package_type}/{package_name}/versions/{version}" response = await self._client.get(api) if not response.is_success: raise GitHubApiError(response) resp = response.json() return resp["metadata"]["container"]["tags"] async def delete_package( self, organization: str, package_type: PackageType, package_name: str ) -> None: """ Delete a package https://docs.github.com/en/rest/reference/packages#delete-a-package-for-an-organization Args: organization: GitHub organization to use package_type: Type of the package to delete package_name: Name of the package to delete Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: api.packages.delete_package( organization="foo", package_type="container", package_name="bar", ) """ api = f"/orgs/{organization}/packages/{package_type}/{package_name}" response = await self._client.delete(api) if not response.is_success: raise GitHubApiError(response) async def delete_package_version( self, organization: str, package_type: PackageType, package_name: str, version: int, ) -> None: """ Delete a package version https://docs.github.com/en/rest/reference/packages#delete-a-package-version-for-an-organization Args: organization: GitHub organization to use package_type: Type of the package to delete package_name: Name of the package to delete version: Version of the package to delete Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: api.packages.delete_package_version( organization="foo", package_type="container", package_name="bar", version=1, ) """ api = f"/orgs/{organization}/packages/{package_type}/{package_name}/versions/{version}" response = await self._client.delete(api) if not response.is_success: raise GitHubApiError(response) async def delete_package_with_tag( self, organization: str, package_type: PackageType, package_name: str, tag: str, ) -> None: """ Delete a package with a specific tag Args: organization: GitHub organization to use package_type: Type of the package to delete package_name: Name of the package to delete tag: Tag of the package to delete Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: api.packages.delete_package_with_tag( organization="foo", package_type="container", package_name="bar", tag="latest", ) """ async for package_version in self.package_versions( organization, package_type, package_name ): if tag in await self.package_version_tags( organization, package_type, package_name, package_version.id ): api = f"/orgs/{organization}/packages/{package_type}/{package_name}/versions/{package_version.id}" response = await self._client.delete(api) if not response.is_success: raise GitHubApiError(response) pontos-25.3.2/pontos/github/api/pull_requests.py000066400000000000000000000302671476255566300220160ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from collections import defaultdict from pathlib import Path from typing import AsyncIterator, Dict, Iterable, List, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.helper import JSON_OBJECT from pontos.github.models.base import FileStatus from 
pontos.github.models.pull_request import ( Comment, PullRequest, PullRequestCommit, ) class GitHubAsyncRESTPullRequests(GitHubAsyncREST): async def exists(self, repo: str, pull_request: Union[int, str]) -> bool: """ Check if a single branch in a repository exists Args: repo: GitHub repository (owner/name) to use pull_request: Pull request number to check Returns: True if the pull requests exists Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: exists = await api.pull_request.exists("foo/bar", 123) """ api = f"/repos/{repo}/pulls/{pull_request}" response = await self._client.get(api) return response.is_success async def get( self, repo: str, pull_request: Union[int, str] ) -> PullRequest: """ Get information about a pull request https://docs.github.com/en/rest/pulls/pulls#get-a-pull-request Args: repo: GitHub repository (owner/name) to use pull_request: Pull request number Returns: Information about the pull request Raises: httpx.HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: pr = await api.pull_requests.get("foo/bar", 123) print(pr) """ api = f"/repos/{repo}/pulls/{pull_request}" response = await self._client.get(api) response.raise_for_status() return PullRequest.from_dict(response.json()) async def commits( self, repo: str, pull_request: Union[int, str] ) -> AsyncIterator[PullRequestCommit]: """ Get all commit information of a pull request https://docs.github.com/en/rest/pulls/pulls#list-commits-on-a-pull-request Hint: At maximum GitHub allows to receive 250 commits of a pull request. Args: repo: GitHub repository (owner/name) to use pull_request: Pull request number Returns: An async iterator yielding pull request commits Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for commit in api.pull_requests.commits( "foo/bar", 123 ): print(commit) """ # per default github only shows 35 commits and at max it is only # possible to receive 100 params = {"per_page": "100"} api = f"/repos/{repo}/pulls/{pull_request}/commits" async for response in self._client.get_all(api, params=params): for commit in response.json(): yield PullRequestCommit.from_dict(commit) async def create( self, repo: str, *, head_branch: str, base_branch: str, title: str, body: str, ) -> PullRequest: """ Create a new Pull Request on GitHub https://docs.github.com/en/rest/pulls/pulls#create-a-pull-request Args: repo: GitHub repository (owner/name) to use head_branch: Branch to create a pull request from base_branch: Branch as target for the pull request title: Title for the pull request body: Description for the pull request. Can be formatted in Markdown Raises: httpx.HTTPStatusError if the request was invalid Returns: A new pull request Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: pr = await api.pull_requests.create( "foo/bar", head_branch="a-new-feature", base_branch="main", title="A new Feature is ready", body="Created a new feature", ) """ api = f"/repos/{repo}/pulls" data: JSON_OBJECT = { "head": head_branch, "base": base_branch, "title": title, "body": body.replace("\\n", "\n"), } response = await self._client.post(api, data=data) response.raise_for_status() return PullRequest.from_dict(response.json()) async def update( self, repo: str, pull_request: Union[int, str], *, base_branch: Optional[str] = None, title: Optional[str] = None, body: Optional[str] = None, ) -> PullRequest: """ Update a Pull Request on GitHub https://docs.github.com/en/rest/pulls/pulls#update-a-pull-request Args: repo: GitHub repository (owner/name) to use pull_request: Pull request number base_branch: Branch as target for the pull request. Leave empty for keeping the current one. title: Title for the pull request. Leave empty for keeping the current one. body: Description for the pull request. Can be formatted in Markdown. Leave empty for keeping the current one. Raises: httpx.HTTPStatusError if the request was invalid Returns: Updated pull request Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: pr = await api.pull_requests.update( "foo/bar", 123, title="Another new Feature", ) """ api = f"/repos/{repo}/pulls/{pull_request}" data: JSON_OBJECT = {} if base_branch: data["base"] = base_branch if title: data["title"] = title if body: data["body"] = body.replace("\\n", "\n") response = await self._client.post(api, data=data) response.raise_for_status() return PullRequest.from_dict(response.json()) async def add_comment( self, repo: str, pull_request: Union[int, str], comment: str ) -> Comment: """ Add a comment to a pull request on GitHub https://docs.github.com/en/rest/issues/comments#create-an-issue-comment Args: repo: GitHub repository (owner/name) to use pull_request: Pull request number where to add a comment comment: The actual comment message. Can be formatted in Markdown. Raises: httpx.HTTPStatusError if the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.pull_requests.add_comment( "foo/bar", 123, "A new comment for the pull request", ) """ api = f"/repos/{repo}/issues/{pull_request}/comments" data: JSON_OBJECT = {"body": comment} response = await self._client.post(api, data=data) response.raise_for_status() return Comment.from_dict(response.json()) async def update_comment( self, repo: str, comment_id: Union[str, int], comment: str ) -> Comment: """ Update a comment to a pull request on GitHub https://docs.github.com/en/rest/issues/comments#update-an-issue-comment Args: repo: GitHub repository (owner/name) to use comment_id: The unique identifier of the comment comment: The actual comment message. Can be formatted in Markdown. Raises: httpx.HTTPStatusError if the request was invalid Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.pull_requests.update_comment( "foo/bar", 123, "A new comment for the pull request", ) """ api = f"/repos/{repo}/issues/comments/{comment_id}" data: JSON_OBJECT = {"body": comment} response = await self._client.post(api, data=data) response.raise_for_status() return Comment.from_dict(response.json()) async def comments( self, repo: str, pull_request: Union[int, str] ) -> AsyncIterator[Comment]: """ Get all comments of a pull request on GitHub https://docs.github.com/en/rest/issues/comments#list-issue-comments Args: repo: GitHub repository (owner/name) to use pull_request: Pull request number where to add a comment Raises: httpx.HTTPStatusError if the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for comment in api.pull_requests.comments( "foo/bar", 123, ): print(comment) """ params = {"per_page": "100"} api = f"/repos/{repo}/issues/{pull_request}/comments" async for response in self._client.get_all(api, params=params): response.raise_for_status() for comment in response.json(): yield Comment.from_dict(comment) async def files( self, repo: str, pull_request: Union[int, str], *, status_list: Optional[Iterable[FileStatus]] = None, ) -> Dict[FileStatus, Iterable[Path]]: """ Get files of a pull request https://docs.github.com/en/rest/pulls/pulls#list-pull-requests-files Hint: At maximum GitHub allows to receive 3000 files of a commit. Args: repo: GitHub repository (owner/name) to use pull_request: Pull request number status_list: Optional iterable of status change types that should be included in the response Returns: Information about the files in the pull request as a dict Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models import FileStatus async with GitHubAsyncRESTApi(token) as api: status = await api.pull_requests.files("foo/bar", 123) # list changed files print(status[FileStatus.MODIFIED]) """ # per default github only shows 35 files per page and at max it is only # possible to receive 100 params = {"per_page": "100"} api = f"/repos/{repo}/pulls/{pull_request}/files" file_dict: Dict[FileStatus, List[Path]] = defaultdict(list) async for response in self._client.get_all(api, params=params): response.raise_for_status() for f in response.json(): try: status = FileStatus(f["status"]) except ValueError: # unknown status continue if not status_list or status in status_list: file_dict[status].append(Path(f["filename"])) return file_dict # type: ignore pontos-25.3.2/pontos/github/api/release.py000066400000000000000000000266361476255566300205340ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import asyncio from pathlib import Path from typing import ( AsyncContextManager, AsyncIterator, Iterable, Optional, Tuple, Union, ) import httpx from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.helper import JSON_OBJECT from pontos.github.models.release import Release from pontos.helper import AsyncDownloadProgressIterable, download_async, upload class GitHubAsyncRESTReleases(GitHubAsyncREST): async def create( self, repo: str, tag: str, *, body: Optional[str] = None, name: Optional[str] = None, target_commitish: Optional[str] = None, draft: bool = False, prerelease: bool = False, ) -> Release: """ Create a new GitHub release https://docs.github.com/en/rest/releases/releases#create-a-release Args: repo: GitHub repository (owner/name) to use tag: The git tag for the release body: Content of the changelog for the release name: name of the release, e.g. 
'pontos 1.0.0' target_commitish: Only needed when tag is not there yet draft: If the release is a draft. False by default. prerelease: If the release is a pre release. False by default. Raises: httpx.HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.releases.create( "foo/bar", "v1.2.3", body="A new release", name="Bar Release 1.2.3", ) """ data: JSON_OBJECT = { "tag_name": tag, "draft": draft, "prerelease": prerelease, } if name is not None: data["name"] = name if body is not None: data["body"] = body if target_commitish is not None: data["target_commitish"] = target_commitish api = f"/repos/{repo}/releases" response = await self._client.post(api, data=data) response.raise_for_status() return Release.from_dict(response.json()) async def exists(self, repo: str, tag: str) -> bool: """ Check wether a GitHub release exists by tag Args: repo: GitHub repository (owner/name) to use tag: The git tag for the release Returns: True if the release exists Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: exists = await = api.releases.exists("foo/bar", "v1.2.3") """ api = f"/repos/{repo}/releases/tags/{tag}" response = await self._client.get(api) return response.is_success async def get(self, repo: str, tag: str) -> Release: """ Get data of a GitHub release by tag https://docs.github.com/en/rest/releases/releases#get-a-release-by-tag-name Args: repo: GitHub repository (owner/name) to use tag: The git tag for the release Raises: httpx.HTTPStatusError: If the request was invalid Returns: Information about the release Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: release = await api.releases.get("foo/bar", "v1.2.3) print(release) """ api = f"/repos/{repo}/releases/tags/{tag}" response = await self._client.get(api) response.raise_for_status() return Release.from_dict(response.json()) def download_release_tarball( self, repo: str, tag: str ) -> AsyncContextManager[AsyncDownloadProgressIterable[bytes]]: # pylint: disable=line-too-long """ Download a release tarball (tar.gz) file Args: repo: GitHub repository (owner/name) to use tag: The git tag for the release Raises: HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api, async with api.releases.download_release_tarball( "foo/bar", "v1.0.0" ) as download: file_path = Path("a.file.tar.gz") with file_path.open("wb") as f: async for content, progress in download: f.write(content) print(progress) """ # noqa: E501 api = f"https://github.com/{repo}/archive/refs/tags/{tag}.tar.gz" return download_async(self._client.stream(api), url=api) def download_release_zip( self, repo: str, tag: str, ) -> AsyncContextManager[AsyncDownloadProgressIterable[bytes]]: # pylint: disable=line-too-long """ Download a release zip file Args: repo: GitHub repository (owner/name) to use tag: The git tag for the release Raises: HTTPStatusError: If the request was invalid Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api, async with api.releases.download_release_zip( "foo/bar", "v1.0.0" ) as download: file_path = Path("a.file.zip") with file_path.open("wb") as f: async for content, progress in download: f.write(content) print(progress) """ # noqa: E501 api = f"https://github.com/{repo}/archive/refs/tags/{tag}.zip" return download_async(self._client.stream(api), url=api) async def download_release_assets( self, repo: str, tag: str, *, match_pattern: Optional[str] = None, ) -> AsyncIterator[ Tuple[str, AsyncContextManager[AsyncDownloadProgressIterable[bytes]]] ]: # pylint: disable=line-too-long """ Download release assets Args: repo: GitHub repository (owner/name) to use tag: The git tag for the release match_pattern: Optional pattern which the name of the available artifact must match. For example "*.zip". Allows to download only specific artifacts. Raises: HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async def download_asset(name: str, download_cm) -> Path: async with download_cm as iterator: file_path = Path(name) with file_path.open("wb") as f: async for content, progress in iterator: f.write(content) print(name, progress) return file_path tasks = [] async for name, download_cm in api.releases.download_release_assets( "foo/bar, "v1.2.3", ): tasks.append(asyncio.create_task( download_asset(name, download_cm) ) file_paths = await asyncio.gather(*tasks) """ # noqa: E501 release = await self.get(repo, tag) assets_url = release.assets_url if not assets_url: raise RuntimeError("assets URL not found") response = await self._client.get(assets_url) response.raise_for_status() assets_json = response.json() for asset_json in assets_json: # use browser_download_url here because url doesn't response with # exactly the same data on every request. 
# not getting exactly the same data changes the hash sum. asset_url: str = asset_json.get("browser_download_url", "") name: str = asset_json.get("name", "") if match_pattern and not Path(name).match(match_pattern): continue yield name, download_async( self._client.stream(asset_url), url=asset_url ) async def upload_release_assets( self, repo: str, tag: str, files: Iterable[Union[Path, Tuple[Path, str]]], ) -> AsyncIterator[Path]: # pylint: disable=line-too-long """ Upload release assets asynchronously Args: repo: GitHub repository (owner/name) to use tag: The git tag for the release files: An iterable of file paths or an iterable of tuples containing a file path and content types to upload as an asset Returns: yields each file after its upload is finished Raises: HTTPStatusError: If an upload request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: files = (Path("foo.txt"), Path("bar.txt"),) async for uploaded_file in api.releases.upload_release_assets( "foo/bar", "1.2.3", files ): print(f"Uploaded: {uploaded_file}") files = [ (Path("foo.txt"), "text/ascii"), (Path("bar.pdf"), "application/pdf"), ] async for uploaded_file in api.releases.upload_release_assets( "foo/bar", "1.2.3", files ): print(f"Uploaded: {uploaded_file}") """ # noqa: E501 release = await self.get(repo, tag) asset_url = release.upload_url.replace("{?name,label}", "") async def upload_file( file_path: Path, content_type: str ) -> Tuple[httpx.Response, Path]: response = await self._client.post( asset_url, params={"name": file_path.name}, content_type=content_type, content_length=file_path.stat().st_size, content=upload(file_path), # type: ignore ) return response, file_path tasks = [] for file_path in files: if isinstance(file_path, tuple): file_path, content_type = file_path # noqa: PLW2901 else: content_type = "application/octet-stream" tasks.append(upload_file(file_path, content_type)) for coroutine in 
asyncio.as_completed(tasks): response, file_path = await coroutine response.raise_for_status() yield file_path pontos-25.3.2/pontos/github/api/repositories.py000066400000000000000000000546321476255566300216400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import Any, Iterable, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.helper import JSON_OBJECT from pontos.github.models.organization import ( GitIgnoreTemplate, LicenseType, MergeCommitMessage, MergeCommitTitle, Repository, SquashMergeCommitMessage, SquashMergeCommitTitle, ) from pontos.helper import enum_or_value from pontos.typing import SupportsStr class GitHubAsyncRESTRepositories(GitHubAsyncREST): async def get(self, repo: str) -> Repository: """ Get a repository https://docs.github.com/en/rest/repos/repos#get-a-repository Args: repo: GitHub repository (owner/name) to request Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Information about the repository Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: repo = await api.repositories.get("foo/bar") print(repo) """ api = f"/repos/{repo}" response = await self._client.get(api) response.raise_for_status() return Repository.from_dict(response.json()) async def delete(self, repo: str) -> None: """ Delete a repository Args: repo: GitHub repository (owner/name) to delete Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.repositories.delete("foo/bar") """ api = f"/repos/{repo}" response = await self._client.delete(api) response.raise_for_status() async def create( self, organization: str, name: str, *, description: Optional[str] = None, homepage: Optional[str] = None, private: Optional[bool] = False, has_issues: Optional[bool] = True, has_projects: Optional[bool] = True, has_wiki: Optional[bool] = True, has_downloads: Optional[bool] = True, is_template: Optional[bool] = False, team_id: Optional[SupportsStr] = None, auto_init: Optional[bool] = False, gitignore_template: Optional[Union[GitIgnoreTemplate, str]] = None, license_template: Optional[Union[LicenseType, str]] = None, allow_squash_merge: Optional[bool] = True, allow_merge_commit: Optional[bool] = True, allow_rebase_merge: Optional[bool] = True, allow_auto_merge: Optional[bool] = False, allow_update_branch: Optional[bool] = False, delete_branch_on_merge: Optional[bool] = False, squash_merge_commit_title: Optional[ Union[SquashMergeCommitTitle, str] ] = None, squash_merge_commit_message: Optional[ Union[SquashMergeCommitMessage, str] ] = None, merge_commit_title: Optional[Union[MergeCommitTitle, str]] = None, merge_commit_message: Optional[Union[MergeCommitMessage, str]] = None, ) -> Repository: """ Create a new repository at GitHub https://docs.github.com/en/rest/repos/repos#create-an-organization-repository Args: organization: Name of the GitHub organization where to create the new repository. name: Name of the GitHub repository. description: Description of the GitHub repository. homepage: A URL with more information about the repository. private: Whether the repository is private. Default: False. has_issues: Either True to enable issues for this repository or False to disable them. Default: True has_projects: Either True to enable projects for this repository or False to disable them. 
Note: If you're creating a repository in an organization that has disabled repository projects, the default is false, and if you pass true, the API returns an error. Default: true. has_wiki: Either True to enable the wiki for this repository or False to disable it. Default: True. has_downloads: Whether downloads are enabled. Default: True. is_template: Either True to make this repo available as a template repository or False to prevent it. Default: False. team_id: The id of the team that will be granted access to this repository. This is only valid when creating a repository in an organization. auto_init: Pass True to create an initial commit with empty README. Default: False. gitignore_template: Desired language or platform .gitignore template to apply. Use the name of the template without the extension. For example, "Haskell". license_template: Choose an open source license template that best suits your needs, and then use the license keyword as the license_template string. For example, "mit" or "mpl-2.0". allow_squash_merge: Either true to allow squash-merging pull requests, or false to prevent squash-merging. Default: True allow_merge_commit: Either True to allow merging pull requests with a merge commit, or False to prevent merging pull requests with merge commits. Default: True. allow_rebase_merge: Either True to allow rebase-merging pull requests, or False to prevent rebase-merging. Default: True. allow_auto_merge: Either True to allow auto-merge on pull requests, or False to disallow auto-merge. Default: False. allow_update_branch: Either True to always allow a pull request head branch, that is behind its base branch, to be updated, even if it is not required to be up to date before merging, or False otherwise. Default: False. delete_branch_on_merge: Either True to allow automatically deleting head branches when pull requests are merged, or False to prevent automatic deletion. Default: False. 
squash_merge_commit_title: The default value for a squash merge commit title: * "PR_TITLE" - default to the pull request's title. * "COMMIT_OR_PR_TITLE" - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). Can be one of: "PR_TITLE", "COMMIT_OR_PR_TITLE" squash_merge_commit_message: The default value for a squash merge commit message: * "PR_BODY" - default to the pull request's body. * "COMMIT_MESSAGES" - default to the branch's commit messages. * "BLANK" - default to a blank commit message. Can be one of: "PR_BODY", "COMMIT_MESSAGES", "BLANK" merge_commit_title: The default value for a merge commit title. * "PR_TITLE" - default to the pull request's title. * "MERGE_MESSAGE" - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). Can be one of: "PR_TITLE", "MERGE_MESSAGE" merge_commit_message: The default value for a merge commit message. * "PR_TITLE" - default to the pull request's title. * "PR_BODY" - default to the pull request's body. * "BLANK" - default to a blank commit message. Can be one of: "PR_BODY", "PR_TITLE", "BLANK" Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: repo = await api.repositories.create( "foo/bar", "baz", description="A new baz repository", private=True, allow_squash_merge=True, allow_merge_commit=False, allow_rebase_merge=True, allow_auto_merge=True, allow_update_branch=True, delete_branch_on_merge=True, ) """ api = f"/orgs/{organization}/repos" data: JSON_OBJECT = {"name": name} if description: data["description"] = description if homepage: data["homepage"] = homepage if private is not None: data["private"] = private if has_issues is not None: data["has_issues"] = has_issues if has_projects is not None: data["has_projects"] = has_projects if has_wiki is not None: data["has_wiki"] = has_wiki if is_template is not None: data["is_template"] = is_template if team_id: data["team_id"] = str(team_id) if has_downloads is not None: data["has_downloads"] = has_downloads if auto_init is not None: data["auto_init"] = auto_init if gitignore_template: data["gitignore_template"] = ( gitignore_template.value if isinstance(gitignore_template, GitIgnoreTemplate) else gitignore_template ) if license_template: data["license_template"] = enum_or_value(license_template) if allow_squash_merge is not None: data["allow_squash_merge"] = allow_squash_merge if allow_merge_commit is not None: data["allow_merge_commit"] = allow_merge_commit if allow_rebase_merge is not None: data["allow_rebase_merge"] = allow_rebase_merge if allow_auto_merge is not None: data["allow_auto_merge"] = allow_auto_merge if allow_update_branch is not None: data["allow_update_branch"] = allow_update_branch if delete_branch_on_merge is not None: data["delete_branch_on_merge"] = delete_branch_on_merge if squash_merge_commit_title: data["squash_merge_commit_title"] = enum_or_value( squash_merge_commit_title ) if squash_merge_commit_message: data["squash_merge_commit_message"] = enum_or_value( squash_merge_commit_message ) if merge_commit_title: 
data["merge_commit_title"] = enum_or_value(merge_commit_title) if merge_commit_message: data["merge_commit_message"] = enum_or_value(merge_commit_message) response = await self._client.post(api, data=data) response.raise_for_status() return Repository.from_dict(response.json()) async def archive(self, repo: str) -> None: """ Archive a GitHub repository WARNING: It is not possible to unarchive a repository via the API. Args: repo: GitHub repository (owner/name) to update Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.repositories.archive("foo/bar") """ api = f"/repos/{repo}" data: JSON_OBJECT = {"archived": True} response = await self._client.post(api, data=data) response.raise_for_status() async def update( self, repo: str, *, name: Optional[str] = None, description: Optional[str] = None, homepage: Optional[str] = None, private: Optional[bool] = False, has_issues: Optional[bool] = True, has_projects: Optional[bool] = True, has_wiki: Optional[bool] = True, is_template: Optional[bool] = False, default_branch: Optional[str] = None, allow_squash_merge: Optional[bool] = True, allow_merge_commit: Optional[bool] = True, allow_rebase_merge: Optional[bool] = True, allow_auto_merge: Optional[bool] = False, allow_update_branch: Optional[bool] = False, delete_branch_on_merge: Optional[bool] = False, squash_merge_commit_title: Optional[ Union[SquashMergeCommitTitle, str] ] = None, squash_merge_commit_message: Optional[ Union[SquashMergeCommitMessage, str] ] = None, merge_commit_title: Optional[Union[MergeCommitTitle, str]] = None, merge_commit_message: Optional[Union[MergeCommitMessage, str]] = None, allow_forking: Optional[bool] = False, web_commit_signoff_required: Optional[bool] = False, ) -> Repository: """ Create a new repository at GitHub https://docs.github.com/en/rest/repos/repos#update-a-repository 
Args: repo: GitHub repository (owner/name) to update name: New name of the GitHub repository. description: Description of the GitHub repository. homepage: A URL with more information about the repository. private: Whether the repository is private. Default: False. has_issues: Either True to enable issues for this repository or False to disable them. Default: True has_projects: Either True to enable projects for this repository or False to disable them. Note: If you're creating a repository in an organization that has disabled repository projects, the default is false, and if you pass true, the API returns an error. Default: true. has_wiki: Either True to enable the wiki for this repository or False to disable it. Default: True. is_template: Either True to make this repo available as a template repository or False to prevent it. Default: False. default_branch: Updates the default branch for this repository. allow_squash_merge: Either true to allow squash-merging pull requests, or false to prevent squash-merging. Default: True allow_merge_commit: Either True to allow merging pull requests with a merge commit, or False to prevent merging pull requests with merge commits. Default: True. allow_rebase_merge: Either True to allow rebase-merging pull requests, or False to prevent rebase-merging. Default: True. allow_auto_merge: Either True to allow auto-merge on pull requests, or False to disallow auto-merge. Default: False. allow_update_branch: Either True to always allow a pull request head branch that is behind its base branch to be updated even if it is not required to be up to date before merging, or False otherwise. Default: False. delete_branch_on_merge: Either True to allow automatically deleting head branches when pull requests are merged, or False to prevent automatic deletion. Default: False. squash_merge_commit_title: The default value for a squash merge commit title: * "PR_TITLE" - default to the pull request's title. 
* "COMMIT_OR_PR_TITLE" - default to the commit's title (if only one commit) or the pull request's title (when more than one commit). Can be one of: "PR_TITLE", "COMMIT_OR_PR_TITLE" squash_merge_commit_message: The default value for a squash merge commit message: * "PR_BODY" - default to the pull request's body. * "COMMIT_MESSAGES" - default to the branch's commit messages. * "BLANK" - default to a blank commit message. Can be one of: "PR_BODY", "COMMIT_MESSAGES", "BLANK" merge_commit_title: The default value for a merge commit title. * "PR_TITLE" - default to the pull request's title. * "MERGE_MESSAGE" - default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name). Can be one of: "PR_TITLE", "MERGE_MESSAGE" merge_commit_message: The default value for a merge commit message. * "PR_TITLE" - default to the pull request's title. * "PR_BODY" - default to the pull request's body. * "BLANK" - default to a blank commit message. Can be one of: "PR_BODY", "PR_TITLE", "BLANK" allow_forking: Either True to allow private forks, or False to prevent private forks. Default: False. web_commit_signoff_required: Either True to require contributors to sign off on web-based commits, or False to not require contributors to sign off on web-based commits. Default: False. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: The updated repository Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: repo = await api.repositories.update( "foo/bar", allow_squash_merge=True, allow_merge_commit=False, allow_rebase_merge=True, allow_auto_merge=True, allow_update_branch=True, delete_branch_on_merge=True, ) """ api = f"/repos/{repo}" data: JSON_OBJECT = {} if name: data["name"] = name if description: data["description"] = description if homepage: data["homepage"] = homepage if private is not None: data["private"] = private if has_issues is not None: data["has_issues"] = has_issues if has_projects is not None: data["has_projects"] = has_projects if has_wiki is not None: data["has_wiki"] = has_wiki if is_template is not None: data["is_template"] = is_template if default_branch: data["default_branch"] = default_branch if allow_squash_merge is not None: data["allow_squash_merge"] = allow_squash_merge if allow_merge_commit is not None: data["allow_merge_commit"] = allow_merge_commit if allow_rebase_merge is not None: data["allow_rebase_merge"] = allow_rebase_merge if allow_auto_merge is not None: data["allow_auto_merge"] = allow_auto_merge if allow_update_branch is not None: data["allow_update_branch"] = allow_update_branch if delete_branch_on_merge is not None: data["delete_branch_on_merge"] = delete_branch_on_merge if squash_merge_commit_title: data["squash_merge_commit_title"] = enum_or_value( squash_merge_commit_title ) if squash_merge_commit_message: data["squash_merge_commit_message"] = enum_or_value( squash_merge_commit_message ) if merge_commit_title: data["merge_commit_title"] = enum_or_value(merge_commit_title) if merge_commit_message: data["merge_commit_message"] = enum_or_value(merge_commit_message) if allow_forking is not None: data["allow_forking"] = allow_forking if web_commit_signoff_required is not None: data["web_commit_signoff_required"] = web_commit_signoff_required response = await self._client.post(api, data=data) 
response.raise_for_status() return Repository.from_dict(response.json()) async def topics(self, repo: str) -> Iterable[str]: """ List all topics of a repository Args: repo: GitHub repository (owner/name) to list the topics for Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An iterable of topics as string Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: topics = await api.repositories.topics("foo/bar") """ api = f"/repos/{repo}/topics" response = await self._client.get(api) response.raise_for_status() data: dict[str, Any] = response.json() return data.get("names", []) async def update_topics( self, repo: str, new_topics: Iterable[str] ) -> Iterable[str]: """ Replace all topics of a repository Args: repo: GitHub repository (owner/name) to update the topics for new_topics: Iterable of new topics to set on the repository Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An iterable of topics as string Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: topics = await api.repositories.update_topics( "foo/bar", ["foo", "bar"] ) """ api = f"/repos/{repo}/topics" data = {"names": list(new_topics)} response = await self._client.put(api, data=data) # type: ignore[arg-type] # noqa: E501 response.raise_for_status() data: dict[str, Any] = response.json() return data.get("names", []) pontos-25.3.2/pontos/github/api/search.py000066400000000000000000000051221476255566300203440ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import AsyncIterator, Iterable, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.models.organization import Repository from pontos.github.models.search import Qualifier, RepositorySort, SortOrder from pontos.helper import enum_or_value class GitHubAsyncRESTSearch(GitHubAsyncREST): async def repositories( self, *, keywords: Iterable[str], qualifiers: Iterable[Qualifier], order: Union[str, SortOrder] = SortOrder.DESC, sort: Union[str, RepositorySort, None] = None, ) -> AsyncIterator[Repository]: """ Search for repositories https://docs.github.com/en/rest/search#search-repositories https://docs.github.com/en/search-github/searching-on-github/searching-for-repositories Args: keywords: List of keywords to search for. qualifiers: List of qualifiers. order: Sort order either 'asc' or 'desc'. Default is 'desc'. sort: Sort the found repositories by this criteria. Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding repositories Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models import ( InReadmeQualifier, InTopicsQualifier, ) async with GitHubAsyncRESTApi(token) as api: # search for keywords in repo topics and READMEs async for repo in api.search( keywords=["utils", "search", "golang"], qualifier=[ InTopicsQualifier(), InReadmeQualifier(), ], ) """ api = "/search/repositories" params = { "per_page": "100", } if order: params["order"] = enum_or_value(order) if sort: params["sort"] = enum_or_value(sort) query = ( f"{' '.join(keywords)} " f"{' '.join([str(qualifier) for qualifier in qualifiers])}" ) params["q"] = query async for response in self._client.get_all(api, params=params): response.raise_for_status() data = response.json() for repo in data["items"]: yield Repository.from_dict(repo) pontos-25.3.2/pontos/github/api/secret_scanning.py000066400000000000000000000325231476255566300222510ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from typing import AsyncIterator, Iterable, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.models.base import SortOrder from pontos.github.models.secret_scanning import ( AlertLocation, AlertSort, AlertState, CommitLocation, IssueBodyLocation, IssueCommentLocation, IssueTitleLocation, LocationType, Resolution, SecretScanningAlert, ) from pontos.helper import enum_or_value class GitHubAsyncRESTSecretScanning(GitHubAsyncREST): async def _alerts( self, api: str, *, state: Union[AlertState, str, None] = None, secret_types: Union[Iterable[str], None] = None, resolutions: Union[Iterable[str], None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[SecretScanningAlert]: params = {"per_page": "100"} if state: params["state"] = enum_or_value(state) if secret_types: # as per REST api docu this param is passed as secret_type 
params["secret_type"] = ",".join(secret_types) if resolutions: # as per REST api docu this param is passed as resolution params["resolution"] = ",".join(resolutions) if sort: params["sort"] = enum_or_value(sort) if direction: params["direction"] = enum_or_value(direction) async for response in self._client.get_all(api, params=params): response.raise_for_status() for alert in response.json(): yield SecretScanningAlert.from_dict(alert) async def enterprise_alerts( self, enterprise: str, *, state: Union[AlertState, str, None] = None, secret_types: Union[Iterable[str], None] = None, resolutions: Union[Iterable[str], None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[SecretScanningAlert]: """ Get the list of secret scanning alerts for all repositories of a GitHub enterprise https://docs.github.com/en/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-an-enterprise Args: enterprise: Name of the enterprise state: Filter alerts by state secret_types: List of secret types to return. resolutions: List secret scanning alerts with one of these provided resolutions sort: The property by which to sort the results. Default is to sort alerts by creation date. direction: The direction to sort the results by. Default is desc. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the secret scanning alerts Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.secret_scanning.enterprise_alerts( "my-enterprise" ): print(alert) """ api = f"/enterprises/{enterprise}/secret-scanning/alerts" async for alert in self._alerts( api, state=state, secret_types=secret_types, resolutions=resolutions, sort=sort, direction=direction, ): yield alert async def organization_alerts( self, organization: str, *, state: Union[AlertState, str, None] = None, secret_types: Union[Iterable[str], None] = None, resolutions: Union[Iterable[str], None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[SecretScanningAlert]: """ Get the list of secret scanning alerts for all repositories of a GitHub organization https://docs.github.com/en/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-an-organization Args: organization: Name of the organization state: Filter alerts by state secret_types: List of secret types to return. resolutions: List secret scanning alerts with one of these provided resolutions sort: The property by which to sort the results. Default is to sort alerts by creation date. direction: The direction to sort the results by. Default is desc. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the secret scanning alerts Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.secret_scanning.organization_alerts( "my-org" ): print(alert) """ api = f"/orgs/{organization}/secret-scanning/alerts" async for alert in self._alerts( api, state=state, secret_types=secret_types, resolutions=resolutions, sort=sort, direction=direction, ): yield alert async def alerts( self, repo: str, *, state: Union[AlertState, str, None] = None, secret_types: Union[Iterable[str], None] = None, resolutions: Union[Iterable[str], None] = None, sort: Union[AlertSort, str] = AlertSort.CREATED, direction: Union[str, SortOrder] = SortOrder.DESC, ) -> AsyncIterator[SecretScanningAlert]: """ Get the list of secret scanning alerts for a repository https://docs.github.com/en/rest/secret-scanning/secret-scanning#list-secret-scanning-alerts-for-a-repository Args: repo: GitHub repository (owner/name) state: Filter alerts by state secret_types: List of secret types to return. resolutions: List secret scanning alerts with one of these provided resolutions sort: The property by which to sort the results. Default is to sort alerts by creation date. direction: The direction to sort the results by. Default is desc. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the secret scanning alerts Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for alert in api.secret_scanning.alerts( "my-org/my-repo" ): print(alert) """ api = f"/repos/{repo}/secret-scanning/alerts" async for alert in self._alerts( api, state=state, secret_types=secret_types, resolutions=resolutions, sort=sort, direction=direction, ): yield alert async def alert( self, repo: str, alert_number: Union[str, int], ) -> SecretScanningAlert: """ Get a single secret scanning alert https://docs.github.com/en/rest/secret-scanning/secret-scanning#get-a-secret-scanning-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a secret scanning alert in its repository Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Secret scanning alert information Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: alert = await api.secret_scanning.alert("foo/bar", 123) """ api = f"/repos/{repo}/secret-scanning/alerts/{alert_number}" response = await self._client.get(api) response.raise_for_status() return SecretScanningAlert.from_dict(response.json()) async def update_alert( self, repo: str, alert_number: Union[str, int], state: Union[AlertState, str], *, resolution: Union[Resolution, str, None] = None, resolution_comment: Optional[str] = None, ) -> SecretScanningAlert: """ Update a single secret scanning alert https://docs.github.com/en/rest/secret-scanning/secret-scanning#update-a-secret-scanning-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a secret scanning alert in its repository state: The state of the alert resolution: Required when the state is resolved. The reason for resolving the alert resolution_comment: An optional comment when closing an alert. Cannot be updated or deleted. 
Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Secret scanning alert information Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.secret_scanning import ( AlertState, Resolution, ) async with GitHubAsyncRESTApi(token) as api: alert = await api.secret_scanning.update_alert( "foo/bar", 123, AlertState.RESOLVED, resolution=Resolution.WONT_FIX, resolution_comment="Not applicable", ) """ api = f"/repos/{repo}/secret-scanning/alerts/{alert_number}" data = {"state": enum_or_value(state)} if resolution: data["resolution"] = enum_or_value(resolution) if resolution_comment: data["resolution_comment"] = resolution_comment response = await self._client.patch(api, data=data) response.raise_for_status() return SecretScanningAlert.from_dict(response.json()) async def locations( self, repo: str, alert_number: Union[str, int], ) -> AsyncIterator[AlertLocation]: """ Lists all locations for a given secret scanning alert for an eligible repository https://docs.github.com/en/rest/secret-scanning/secret-scanning#list-locations-for-a-secret-scanning-alert Args: repo: GitHub repository (owner/name) alert_number: The number that identifies a secret scanning alert in its repository Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding the secret scanning alert locations Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for location in await api.secret_scanning.locations( "foo/bar", 123, ): print(location) """ api = f"/repos/{repo}/secret-scanning/alerts/{alert_number}/locations" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for location in response.json(): location_type = location["type"] location_details = location["details"] if location_type == LocationType.COMMIT.value: yield AlertLocation( type=LocationType.COMMIT, details=CommitLocation.from_dict(location_details), ) elif location_type == LocationType.ISSUE_BODY.value: yield AlertLocation( type=LocationType.ISSUE_BODY, details=IssueBodyLocation.from_dict(location_details), ) elif location_type == LocationType.ISSUE_COMMENT.value: yield AlertLocation( type=LocationType.ISSUE_COMMENT, details=IssueCommentLocation.from_dict( location_details ), ) elif location_type == LocationType.ISSUE_TITLE.value: yield AlertLocation( type=LocationType.ISSUE_TITLE, details=IssueTitleLocation.from_dict(location_details), ) pontos-25.3.2/pontos/github/api/tags.py000066400000000000000000000124111476255566300200340ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from datetime import datetime from typing import Any, AsyncIterator, Dict, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.models.tag import GitObjectType, RepositoryTag, Tag from pontos.helper import enum_or_value class GitHubAsyncRESTTags(GitHubAsyncREST): async def create( self, repo: str, tag: str, message: str, name: str, email: str, git_object: str, *, git_object_type: Optional[ Union[GitObjectType, str] ] = GitObjectType.COMMIT, date: Optional[datetime] = None, ) -> Tag: """ Create a new Git tag https://docs.github.com/en/rest/git/tags#create-a-tag-object Args: repo: GitHub 
repository (owner/name) to use tag: The tag's name. This is typically a version (e.g., "v0.0.1"). message: The tag message. name: The name of the author of the tag email: The email of the author of the tag git_object: The SHA of the git object this is tagging. git_object_type: The type of the object we're tagging. Normally this is a commit type but it can also be a tree or a blob. date: When this object was tagged. Raises: HTTPStatusError: If the request was invalid Returns: A new git tag Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: tag = await api.tags.create( "foo/bar", "v1.2.3", "Create tag v1.2.3", "John Doe", "john@doe.com", e746420, ) print(tag) """ data = { "tag": tag, "message": message, "object": git_object, "type": enum_or_value(git_object_type), "tagger": { "name": name, "email": email, }, } if date: data["tagger"]["date"] = date.isoformat(timespec="seconds") api = f"/repos/{repo}/git/tags" response = await self._client.post(api, data=data) response.raise_for_status() return Tag.from_dict(response.json()) async def create_tag_reference( self, repo: str, tag: str, sha: str, ) -> None: """ Create git tag reference (A real tag in git). https://docs.github.com/en/rest/git/refs#create-a-reference Args: repo: The name of the repository. The name is not case sensitive. tag: Github tag name. sha: The SHA1 value for this Github tag. Raises: HTTPStatusError: If the request was invalid Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.tags.create_tag_reference( "foo/bar", "v1.2.3", e746420, ) """ data: Dict[str, Any] = { "ref": f"refs/tags/{tag}", "sha": sha, } api = f"/repos/{repo}/git/refs" response = await self._client.post(api, data=data) response.raise_for_status() async def get(self, repo: str, tag_sha: str) -> Tag: """ Get information about a git tag Args: repo: GitHub repository (owner/name) to use tag_sha: SHA of the git tag object Raises: HTTPStatusError: If the request was invalid Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: tag = await api.tags.get("foo/bar", "e746420") print(tag) """ api = f"/repos/{repo}/git/tags/{tag_sha}" response = await self._client.get(api) response.raise_for_status() return Tag.from_dict(response.json()) async def get_all(self, repo: str) -> AsyncIterator[RepositoryTag]: """ Get information about all git tags Args: repo: GitHub repository (owner/name) to use Raises: HTTPStatusError: If the request was invalid Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for tag in api.tags.get_all( "foo/bar" ): print(tag) """ api = f"/repos/{repo}/git/tags" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for tag in response.json(): yield RepositoryTag.from_dict(tag) pontos-25.3.2/pontos/github/api/teams.py000066400000000000000000000344301476255566300202140ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import Any, AsyncIterator, Dict, Iterable, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.models.base import ( Permission, Team, TeamPrivacy, TeamRole, User, ) from pontos.github.models.organization import Repository from pontos.helper import enum_or_value class GitHubAsyncRESTTeams(GitHubAsyncREST): async def get_all(self, organization: str) -> AsyncIterator[Team]: """ Get information about teams of an organization. https://docs.github.com/en/rest/teams/teams#list-teams Args: organization: GitHub organization to use Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding teams Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for team in api.teams.get_all("foo"): print(team) """ api = f"/orgs/{organization}/teams" params = { "per_page": "100", } async for response in self._client.get_all(api, params=params): response.raise_for_status() for team in response.json(): yield Team.from_dict(team) async def create( self, organization: str, name: str, *, description: Optional[str] = None, maintainers: Optional[Iterable[str]] = None, repo_names: Optional[Iterable[str]] = None, privacy: Union[TeamPrivacy, str, None] = None, parent_team_id: Optional[str] = None, ) -> Team: # pylint: disable=line-too-long """ Create a new team in an organization https://docs.github.com/en/rest/teams/teams#create-a-team Args: organization: GitHub organization to use name: The name of the new team. description: The description of the team. maintainers: List GitHub IDs for organization members who will become team maintainers. repo_names: The full name (e.g., "organization-name/repository-name" ) of repositories to add the team to. privacy: The level of privacy this team should have. The options are: For a non-nested team: * secret - only visible to organization owners and members of this team. * closed - visible to all members of this organization. Default: secret For a parent or child team: * closed - visible to all members of this organization. Default for child team: closed parent_team_id: The ID of a team to set as the parent team. Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: A new team Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: team = await api.teams.create("foo", "devops") print(team) """ # noqa: E501 api = f"/orgs/{organization}/teams" data: Dict[str, Any] = {"name": name} if description: data["description"] = description if maintainers: data["maintainers"] = list(maintainers) if repo_names: data["repo_names"] = list(repo_names) if privacy: data["privacy"] = enum_or_value(privacy) if parent_team_id: data["parent_team_id"] = parent_team_id response = await self._client.post(api, data=data) response.raise_for_status() return Team.from_dict(response.json()) async def get( self, organization: str, team: str, ) -> Team: """ Gets a team using the team's slug. GitHub generates the slug from the team name. https://docs.github.com/en/rest/teams/teams#get-a-team-by-name Args: organization: GitHub organization to use team: The team slug of the team. Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: Information about the team Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: team = await api.teams.get("foo", "devops") print(team) """ api = f"/orgs/{organization}/teams/{team}" response = await self._client.get(api) response.raise_for_status() return Team.from_dict(response.json()) async def update( self, organization: str, team: str, *, name: str, description: Optional[str] = None, privacy: Union[TeamPrivacy, str, None] = None, parent_team_id: Optional[str] = None, ) -> Team: # pylint: disable=line-too-long """ Update team information in an organization https://docs.github.com/en/rest/teams/teams#update-a-team Args: organization: GitHub organization to use team: The slug of the team name. name: The name of the team. description: The description of the team. privacy: The level of privacy this team should have. 
The options are: For a non-nested team: * secret - only visible to organization owners and members of this team. * closed - visible to all members of this organization. Default: secret For a parent or child team: * closed - visible to all members of this organization. Default for child team: closed parent_team_id: The ID of a team to set as the parent team. Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: The updated team Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: team = await api.teams.update( "foo", "devops", name="DevSecOps" ) """ # noqa: E501 api = f"/orgs/{organization}/teams/{team}" data: Dict[str, Any] = {} if name: data["name"] = name if description: data["description"] = description if privacy: data["privacy"] = enum_or_value(privacy) if parent_team_id: data["parent_team_id"] = parent_team_id response = await self._client.post(api, data=data) response.raise_for_status() return Team.from_dict(response.json()) async def delete( self, organization: str, team: str, ) -> None: """ Delete a new team of an organization https://docs.github.com/en/rest/teams/teams#delete-a-team Args: organization: GitHub organization to use team: The slug of the team name. Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.teams.delete("foo", "bar") """ api = f"/orgs/{organization}/teams/{team}" response = await self._client.delete(api) response.raise_for_status() async def members( self, organization: str, team: str, ) -> AsyncIterator[User]: """ Get all members of a team. Team members will include the members of child teams. https://docs.github.com/en/rest/teams/members#list-team-members Args: organization: GitHub organization to use team: The slug of the team name. 
Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding users Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for user in api.teams.members("foo", "bar): print(user) """ api = f"/orgs/{organization}/teams/{team}/members" params = { "per_page": "100", } async for response in self._client.get_all(api, params=params): response.raise_for_status() for member in response.json(): yield User.from_dict(member) async def update_member( self, organization: str, team: str, username: str, *, role: Union[TeamRole, str] = TeamRole.MEMBER, ) -> None: """ Add or update a member of a team. https://docs.github.com/en/rest/teams/members#add-or-update-team-membership-for-a-user Args: organization: GitHub organization to use team: The slug of the team name. username: The handle for the GitHub user account. role: The role that this user should have in the team. Default: member. Can be one of: member, maintainer. Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models import TeamRole async with GitHubAsyncRESTApi(token) as api: await api.teams.update_member( "foo", "bar", "a_user", role=TeamRole.MAINTAINER, ) """ api = f"/orgs/{organization}/teams/{team}/memberships/{username}" data: Dict[str, Any] = {"role": enum_or_value(role)} response = await self._client.put(api, data=data) response.raise_for_status() # add_member is the same API as update_member add_member = update_member async def remove_member( self, organization: str, team: str, username: str, ) -> None: """ Remove a member from a team. https://docs.github.com/en/rest/teams/members#remove-team-membership-for-a-user Args: organization: GitHub organization to use team: The slug of the team name. username: The handle for the GitHub user account. 
Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models import TeamRole async with GitHubAsyncRESTApi(token) as api: await api.teams.remove_member( "foo", "bar", "a_user", ) """ api = f"/orgs/{organization}/teams/{team}/memberships/{username}" response = await self._client.delete(api) response.raise_for_status() async def repositories( self, organization: str, team: str, ) -> AsyncIterator[Repository]: """ Lists a team's repositories visible to the authenticated user. https://docs.github.com/en/rest/teams/teams#list-team-repositories Args: organization: GitHub organization to use team: The slug of the team name. Raises: `httpx.HTTPStatusError`: If there was an error in the request Returns: An async iterator yielding repositories Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for repo in api.teams.repositories("foo", "bar"): print(repo) """ api = f"/orgs/{organization}/teams/{team}/repos" params = { "per_page": "100", } async for response in self._client.get_all(api, params=params): response.raise_for_status() for repo in response.json(): yield Repository.from_dict(repo) async def update_permission( self, organization: str, team: str, repository: str, permission: Union[Permission, str], ) -> None: """ Add or update team repository permissions Args: organization: GitHub organization to use team: The slug of the team name. repository: GitHub repository (only name) to add or change permissions on. permission: The permission to grant the team on the repository. Raises: `httpx.HTTPStatusError`: If there was an error in the request Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models import Permission async with GitHubAsyncRESTApi(token) as api: await api.teams.update_permission( "foo", "bar", "baz", Permission.MAINTAIN, ) """ api = ( f"/orgs/{organization}/teams/{team}/repos/{organization}/" f"{repository}" ) data: Dict[str, Any] = {"permission": enum_or_value(permission)} response = await self._client.put(api, data=data) response.raise_for_status() add_permission = update_permission pontos-25.3.2/pontos/github/api/users.py000066400000000000000000000217061476255566300202460ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from typing import AsyncIterator, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.api.helper import JSON_OBJECT from pontos.github.models.base import User from pontos.github.models.user import ( EmailInformation, SSHPublicKey, SSHPublicKeyExtended, ) class GitHubAsyncRESTUsers(GitHubAsyncREST): async def users(self) -> AsyncIterator[User]: """ https://docs.github.com/en/rest/users/users#list-users Args: username: The handle for the GitHub user account Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding user information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for user in api.users.users(): print(user) """ api = "/users" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for user in response.json(): yield User.from_dict(user) async def user(self, username: str) -> User: """ Provide publicly available information about someone with a GitHub account https://docs.github.com/en/rest/users/users#get-a-user Args: username: The handle for the GitHub user account Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Information about the user Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: user = await api.users.user("foo") print(user) """ api = f"/users/{username}" response = await self._client.get(api) response.raise_for_status() return User.from_dict(response.json()) async def current_user(self) -> User: """ Get the current authenticated user https://docs.github.com/en/rest/users/users#get-the-authenticated-user Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Information about the user Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: user = await api.users.current_user() print(user) """ api = "/user" response = await self._client.get(api) response.raise_for_status() return User.from_dict(response.json()) async def user_keys(self, username: str) -> AsyncIterator[SSHPublicKey]: """ List the verified public SSH keys for a user https://docs.github.com/en/rest/users/keys#list-public-keys-for-a-user Args: username: The handle for the GitHub user account Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding ssh key information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for key in api.users.user_keys("foo"): print(key) """ api = f"/users/{username}/keys" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for key in response.json(): yield SSHPublicKey.from_dict(key) async def keys(self) -> AsyncIterator[SSHPublicKey]: """ List the public SSH keys for the authenticated user's GitHub account https://docs.github.com/en/rest/users/keys#list-public-ssh-keys-for-the-authenticated-user Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding ssh key information Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for key in api.users.keys(): print(key) """ api = "/user/keys" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for key in response.json(): yield SSHPublicKey.from_dict(key) async def emails(self) -> AsyncIterator[EmailInformation]: """ List all email addresses of the currently logged in user https://docs.github.com/en/rest/users/emails#list-email-addresses-for-the-authenticated-user Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding email information Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for email in api.users.emails(): print(email) """ api = "/user/emails" params = {"per_page": "100"} async for response in self._client.get_all(api, params=params): response.raise_for_status() for email in response.json(): yield EmailInformation.from_dict(email) async def key(self, key_id: Union[str, int]) -> SSHPublicKeyExtended: """ View extended details for a single public SSH key https://docs.github.com/en/rest/users/keys#get-a-public-ssh-key-for-the-authenticated-user Args: key_id: The unique identifier of the key Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Extended information about the SSH key Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: key = await api.users.key(123) print(key) """ api = f"/user/keys/{key_id}" response = await self._client.get(api) response.raise_for_status() return SSHPublicKeyExtended.from_dict(response.json()) async def delete_key(self, key_id: Union[str, int]) -> None: """ Removes a public SSH key from the authenticated user's GitHub account https://docs.github.com/en/rest/users/keys#delete-a-public-ssh-key-for-the-authenticated-user Args: key_id: The unique identifier of the key Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: await api.users.delete_key(123) """ api = f"/user/keys/{key_id}" response = await self._client.delete(api) response.raise_for_status() async def create_key(self, title: str, key: str) -> SSHPublicKeyExtended: """ Adds a public SSH key to the authenticated user's GitHub account https://docs.github.com/en/rest/users/keys#create-a-public-ssh-key-for-the-authenticated-user Args: title: A descriptive name for the new key key: The public SSH key to add to your GitHub account Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Extended information about the SSH key Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: key = await api.users.create_key( "My SSH Public Key", "2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvv1234" ) print(key) """ api = "/user/keys" data: JSON_OBJECT = {"key": key, "title": title} response = await self._client.post(api, data=data) response.raise_for_status() return SSHPublicKeyExtended.from_dict(response.json()) pontos-25.3.2/pontos/github/api/workflows.py000066400000000000000000000205631476255566300211420ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import Any, AsyncIterator, Dict, Optional, Union from pontos.github.api.client import GitHubAsyncREST from pontos.github.models.base import Event from pontos.github.models.workflow import ( Workflow, WorkflowRun, WorkflowRunStatus, ) from pontos.helper import enum_or_value class GitHubAsyncRESTWorkflows(GitHubAsyncREST): def get_all(self, repo: str) -> AsyncIterator[Workflow]: """ List all workflows of a repository https://docs.github.com/en/rest/actions/workflows#list-repository-workflows Args: repo: GitHub repository (owner/name) to use Raises: HTTPStatusError: A httpx.HTTPStatusError is 
raised if the request failed. Returns: An async iterator yielding workflows Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for workflow in api.workflows.get_all("foo/bar"): print(workflow) """ api = f"/repos/{repo}/actions/workflows" return self._get_paged_items(api, "workflows", Workflow) # type: ignore async def get(self, repo: str, workflow: Union[str, int]) -> Workflow: """ Get the information for the given workflow https://docs.github.com/en/rest/actions/workflows#get-a-workflow Args: repo: GitHub repository (owner/name) to use workflow: ID of the workflow or workflow file name. For example `main.yml`. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Information about the workflow Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: workflow = await api.workflows.get("foo/bar", "ci.yml") print(workflow) """ api = f"/repos/{repo}/actions/workflows/{workflow}" response = await self._client.get(api) response.raise_for_status() return Workflow.from_dict(response.json()) async def create_workflow_dispatch( self, repo: str, workflow: Union[str, int], *, ref: str, inputs: Optional[Dict[str, str]] = None, ) -> None: """ Create a workflow dispatch event to manually trigger a GitHub Actions workflow run. https://docs.github.com/en/rest/actions/workflows#create-a-workflow-dispatch-event Args: repo: GitHub repository (owner/name) to use workflow: ID of the workflow or workflow file name. For example `main.yml`. ref: The git reference for the workflow. The reference can be a branch or tag name. inputs: Input keys and values configured in the workflow file. Any default properties configured in the workflow file will be used when inputs are omitted. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi with GitHubAsyncRESTApi(token) as api: await api.workflows.create_workflow_dispatch( "foo/bar", "ci.yml", ref="main" ) """ api = f"/repos/{repo}/actions/workflows/{workflow}/dispatches" data: Dict[str, Any] = {"ref": ref} if inputs: data["inputs"] = inputs response = await self._client.post(api, data=data) response.raise_for_status() def get_workflow_runs( self, repo: str, workflow: Optional[Union[str, int]] = None, *, actor: Optional[str] = None, branch: Optional[str] = None, event: Optional[Union[Event, str]] = None, status: Optional[Union[WorkflowRunStatus, str]] = None, created: Optional[str] = None, exclude_pull_requests: Optional[bool] = None, ) -> AsyncIterator[WorkflowRun]: # pylint: disable=line-too-long """ List all workflow runs of a repository or of a specific workflow. https://docs.github.com/en/rest/actions/workflow-runs#list-workflow-runs-for-a-repository https://docs.github.com/en/rest/actions/workflow-runs#list-workflow-runs-for-a-workflow Args: repo: GitHub repository (owner/name) to use workflow: Optional ID of the workflow or workflow file name. For example `main.yml`. actor: Only return workflow runs of this user ID. branch: Only return workflow runs for a specific branch. event: Only return workflow runs triggered by the event specified. For example, `push`, `pull_request` or `issue`. For more information, see https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows. status: Only return workflow runs with the check run status or conclusion that specified. For example, a conclusion can be `success` or a status can be `in_progress`. Can be one of: `completed`, `action_required`, `cancelled`, `failure`, `neutral`, `skipped`, `stale`, `success`, `timed_out`, `in_progress`, `queued`, `requested`, `waiting`. created: Only returns workflow runs created within the given date-time range. 
For more information on the syntax, see https://docs.github.com/en/search-github/getting-started-with-searching-on-github/understanding-the-search-syntax#query-for-dates exclude_pull_requests: If true pull requests are omitted from the response. Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: An async iterator yielding workflow runs Example: .. code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: async for run in api.workflows.get_workflow_runs( "foo/bar", "ci.yml" ): print(run) """ api = ( f"/repos/{repo}/actions/workflows/{workflow}/runs" if workflow else f"/repos/{repo}/actions/runs" ) params: Dict[str, Any] = {} if actor: params["actor"] = actor if branch: params["branch"] = branch if event: params["event"] = enum_or_value(event) if status: params["status"] = enum_or_value(status) if created: params["created"] = created if exclude_pull_requests is not None: params["exclude_pull_requests"] = exclude_pull_requests return self._get_paged_items( # type: ignore api, "workflow_runs", WorkflowRun, params=params ) async def get_workflow_run( self, repo: str, run: Union[str, int] ) -> WorkflowRun: """ Get information about a single workflow run https://docs.github.com/en/rest/actions/workflow-runs#get-a-workflow-run Args: repo: GitHub repository (owner/name) to use run: The ID of the workflow run Raises: HTTPStatusError: A httpx.HTTPStatusError is raised if the request failed. Returns: Information about the workflow run Example: .. 
code-block:: python from pontos.github.api import GitHubAsyncRESTApi async with GitHubAsyncRESTApi(token) as api: run = await api.workflows.get_workflow_run("foo/bar", 123) print(run) """ api = f"/repos/{repo}/actions/runs/{run}" response = await self._client.get(api) response.raise_for_status() return WorkflowRun.from_dict(response.json()) pontos-25.3.2/pontos/github/cmds.py000066400000000000000000000200761476255566300172610ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import sys from argparse import Namespace from pathlib import Path import httpx from pontos.github.api import GitHubAsyncRESTApi from pontos.terminal import Terminal async def tag(terminal: Terminal, args: Namespace) -> None: """Github release function for argument class to call""" await args.tag_func(terminal, args) async def create_tag(terminal: Terminal, args: Namespace) -> None: """Github create tag function for argument class to call""" async with GitHubAsyncRESTApi(token=args.token) as api: try: # Create tag new_tag = await api.tags.create( repo=args.repo, tag=args.tag, message=args.message, git_object=args.git_object, name=args.name, email=args.email, git_object_type=args.git_object_type, date=args.date, ) # Create tag reference await api.tags.create_tag_reference( repo=args.repo, tag=args.tag, sha=new_tag.sha ) except httpx.HTTPError as e: terminal.error(str(e)) sys.exit(1) terminal.ok("Tag created.") async def release(terminal: Terminal, args: Namespace) -> None: """Github release function for argument class to call""" await args.re_func(terminal, args) async def create_release(terminal: Terminal, args: Namespace) -> None: """Github create release function for argument class to call""" async with GitHubAsyncRESTApi(token=args.token) as api: try: # Check if release exist exists = await api.releases.exists(repo=args.repo, tag=args.tag) if exists: terminal.error(f"Release {args.tag} exist.") sys.exit(1) # Create 
release await api.releases.create( repo=args.repo, tag=args.tag, body=args.body, name=args.name, target_commitish=args.target_commitish, draft=args.draft, prerelease=args.prerelease, ) terminal.ok("Release created.") except httpx.HTTPError as e: terminal.error(str(e)) sys.exit(1) async def pull_request(terminal: Terminal, args: Namespace): await args.pr_func(terminal, args) async def create_pull_request(terminal: Terminal, args: Namespace): async with GitHubAsyncRESTApi(token=args.token) as api: try: # check if branches exist exists = await api.branches.exists(repo=args.repo, branch=args.head) if not exists: terminal.error( f"Head branch {args.head} is not existing " "or authorisation failed." ) sys.exit(1) terminal.ok(f"Head branch {args.head} is existing.") exists = await api.branches.exists( repo=args.repo, branch=args.target ) if not exists: terminal.error( f"Target branch {args.target} is not existing or " "authorisation failed." ) sys.exit(1) terminal.ok(f"Target branch {args.target} exists.") await api.pull_requests.create( repo=args.repo, head_branch=args.head, base_branch=args.target, title=args.title, body=args.body, ) terminal.ok("Pull Request created.") except httpx.HTTPError as e: terminal.error(str(e)) sys.exit(1) async def update_pull_request(terminal: Terminal, args: Namespace): async with GitHubAsyncRESTApi(token=args.token) as api: try: if args.target: # check if branches exist exists = await api.branches.exists( repo=args.repo, branch=args.target ) if not exists: terminal.error( f"Target branch {args.target} is not existing or " "authorisation failed." 
) sys.exit(1) terminal.ok(f"Target branch {args.target} exists.") await api.pull_requests.update( repo=args.repo, pull_request=args.pull_request, base_branch=args.target, title=args.title, body=args.body, ) terminal.ok("Pull Request updated.") except httpx.HTTPError as e: terminal.error(str(e)) sys.exit(1) async def file_status(terminal: Terminal, args: Namespace): async with GitHubAsyncRESTApi(token=args.token) as api: try: # check if PR is existing exists = await api.pull_requests.exists( repo=args.repo, pull_request=args.pull_request ) if not exists: terminal.error( f"PR {args.pull_request} is not existing " "or authorisation failed." ) sys.exit(1) terminal.ok(f"PR {args.pull_request} exists.") file_dict = await api.pull_requests.files( repo=args.repo, pull_request=args.pull_request, status_list=args.status, ) for status in args.status: terminal.info(f"{status.value}:") files = [str(f.resolve()) for f in file_dict[status]] for file_string in files: terminal.print(file_string) if args.output: args.output.write("\n".join(files) + "\n") except httpx.HTTPError as e: terminal.error(str(e)) sys.exit(1) async def labels(terminal: Terminal, args: Namespace): async with GitHubAsyncRESTApi(token=args.token) as api: try: # check if PR is existing exists = await api.pull_requests.exists( repo=args.repo, pull_request=args.issue ) if not exists: terminal.error( f"PR {args.issue} is not existing or authorisation failed." 
) sys.exit(1) terminal.ok(f"PR {args.issue} exists.") issue_labels = [] async for label in api.labels.get_all( repo=args.repo, issue=args.issue, ): issue_labels.append(label) issue_labels.extend(args.labels) await api.labels.set_all( repo=args.repo, issue=args.issue, labels=issue_labels ) except httpx.HTTPError as e: terminal.error(str(e)) sys.exit(1) async def repos(terminal: Terminal, args: Namespace): async with GitHubAsyncRESTApi(token=args.token) as api: try: # check if Orga is existing exists = await api.organizations.exists(args.orga) if not exists: terminal.error( f"Organization {args.orga} is not existing or authorisation failed." ) sys.exit(1) terminal.ok(f"Organization {args.orga} exists.") if args.path: repo_info = Path(args.path) with repo_info.open(encoding="utf-8", mode="w") as fp: async for repo in api.organizations.get_repositories( organization=args.orga, repository_type=args.type ): fp.write(repr(repo)) else: async for repo in api.organizations.get_repositories( organization=args.orga, repository_type=args.type ): terminal.print(repo) except httpx.HTTPError as e: terminal.error(str(e)) sys.exit(1) pontos-25.3.2/pontos/github/main.py000066400000000000000000000013121476255566300172470ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import asyncio import sys from pontos.terminal.null import NullTerminal from pontos.terminal.rich import RichTerminal from ._parser import parse_args def main(args=None): parsed_args = parse_args(args) if parsed_args.quiet: term = NullTerminal() else: term = RichTerminal() term.bold_info(f"pontos-github => {parsed_args.func.__name__}") with term.indent(): if not parsed_args.token: term.error("A Github User Token is required.") sys.exit(1) asyncio.run(parsed_args.func(term, parsed_args)) if __name__ == "__main__": main() 
pontos-25.3.2/pontos/github/models/000077500000000000000000000000001476255566300172375ustar00rootroot00000000000000pontos-25.3.2/pontos/github/models/__init__.py000066400000000000000000000010401476255566300213430ustar00rootroot00000000000000# Copyright (C) 2022 - 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # # ruff: noqa: F403 from pontos.github.models.artifact import * from pontos.github.models.base import * from pontos.github.models.branch import * from pontos.github.models.organization import * from pontos.github.models.packages import * from pontos.github.models.pull_request import * from pontos.github.models.release import * from pontos.github.models.search import * from pontos.github.models.tag import * from pontos.github.models.workflow import * pontos-25.3.2/pontos/github/models/artifact.py000066400000000000000000000032651476255566300214140ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass from datetime import datetime from typing import Optional from pontos.github.models.base import GitHubModel __all__ = ( "Artifact", "ArtifactWorkflowRun", ) @dataclass class ArtifactWorkflowRun(GitHubModel): """ The workflow run that uploaded the artifact Attributes: id: ID of the workflow run repository_id: ID of the corresponding repository head_repository_id: head_branch: Corresponding branch name head_sha: Commit ID of the head of the corresponding branch """ id: int repository_id: int head_repository_id: int head_branch: str head_sha: str @dataclass class Artifact(GitHubModel): """ A GitHub Artifact model Attributes: id: ID of the artifact node_id: Node ID of the artifact name: Name of the artifact size_in_bytes: The size (in bytes) of the artifact url: REST API URL of the artifact archive_download_url: URL to download the artifact expired: True if the artifact has expired created_at: Creation date of the artifact expires_at: Expiration 
date of the artifact update_at: Last modification date of the artifact workflow_run: Corresponding GitHub workflow run """ id: int node_id: str name: str size_in_bytes: int url: str archive_download_url: str expired: bool created_at: Optional[datetime] = None expires_at: Optional[datetime] = None updated_at: Optional[datetime] = None workflow_run: Optional[ArtifactWorkflowRun] = None pontos-25.3.2/pontos/github/models/base.py000066400000000000000000000157361476255566300205370ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass from typing import List, Optional from pontos.models import Model, StrEnum __all__ = ( "App", "Event", "FileStatus", "GitHubModel", "Permission", "SortOrder", "Team", "TeamPrivacy", "TeamRole", "User", ) class FileStatus(StrEnum): """ File status Attributes: ADDED: File is added DELETED: File is deleted MODIFIED: File is modified RENAMED: File is renamed COPIED: File is copied CHANGED: File is changed UNCHANGED: File is unchanged """ ADDED = "added" DELETED = "deleted" MODIFIED = "modified" RENAMED = "renamed" COPIED = "copied" CHANGED = "changed" UNCHANGED = "unchanged" @dataclass(init=False) class GitHubModel(Model): """ Base class for all GitHub models """ @dataclass class User(GitHubModel): """ A GitHub user model Attributes: login: The user login name id: The user ID node_id: The user node ID avatar_url: URL to the avatar image gravatar_url: URL to the gravatar avatar image html_url: URL to the users public profile followers_url: URL to the followers following_url: URL to users that the user if following gists_url: URL to the user's gists starred_url: URL to the starred repositories of the user subscriptions_url: URL to the subscriptions organizations_url: URL to the user's organizations repos_url: URL to the user's repositories events_url: URL to the events received_events_url: URL to the received events type: The user's type 
site_admin: True if the user is a site admin """ login: str id: int node_id: str avatar_url: str gravatar_id: str url: str html_url: str followers_url: str following_url: str gists_url: str starred_url: str subscriptions_url: str organizations_url: str repos_url: str events_url: str received_events_url: str type: str site_admin: bool class TeamPrivacy(StrEnum): """ Team privacy Attributes: SECRET: A secret team CLOSED: A closed team """ SECRET = "secret" CLOSED = "closed" class TeamRole(StrEnum): """ A user's role withing a team Attributes: MEMBER: The user is a "normal" member MAINTAINER: The user is an admin of the team """ MEMBER = "member" MAINTAINER = "maintainer" class Permission(StrEnum): # pylint: disable=line-too-long """ Permissions on a repository/project at GitHub https://docs.github.com/en/organizations/managing-user-access-to-your-organizations-repositories/repository-roles-for-an-organization Attributes: PULL: Read permissions PUSH: Write permissions TRIAGE: Triage permissions MAINTAIN: Maintainer permissions ADMIN: Admin permissions (full access to the project) """ PULL = "pull" PUSH = "push" TRIAGE = "triage" MAINTAIN = "maintain" ADMIN = "admin" @dataclass class Team(GitHubModel): """ A GitHub Team model Attributes: id: ID of the team node_id: Node ID of the team url: REST API URL for the team html_url: Web URL for the team name: Name of the team slug: Slug of the team name description: Description of the team privacy: Privacy scope of the team permission: Permissions of the teams members_url: REST API URL to the members of the team repositories_url: REST API URL to the repositories of the team parent: An optional parent team """ id: int node_id: str url: str html_url: str name: str slug: str description: str privacy: TeamPrivacy permission: Permission members_url: str repositories_url: str parent: Optional["Team"] = None @dataclass class App(GitHubModel): """ GitHub app Attributes: id: ID of the app slug: Name slug of the app node_id: Node ID of 
the app owner: Owner (user) of the app name: Name of the app description: Description of the app external_url: External URL html_url: URL to the web page of the app created_at: Creation date updated_at: Last modification date events: List of events """ id: int slug: str node_id: str owner: User name: str description: str external_url: str html_url: str created_at: str updated_at: str events: List[str] class Event(StrEnum): """ A GitHub event type https://docs.github.com/de/actions/using-workflows/events-that-trigger-workflows Attributes: BRANCH_PROTECTION_RULE: CHECK_RUN: CHECK_SUITE: CREATE: DELETE: DEPLOYMENT: DEPLOYMENT_STATUS: DISCUSSION: DISCUSSION_COMMENT: FORK: GOLLUM: ISSUE_COMMENT: ISSUES: LABEL: MERGE_GROUP: MILESTONE: PAGE_BUILD: PROJECT: PROJECT_CARD: PROJECT_COLUMN: PUBLIC: PULL_REQUEST: PULL_REQUEST_COMMENT: PULL_REQUEST_REVIEW: PULL_REQUEST_REVIEW_COMMENT: PULL_REQUEST_TARGET: PUSH: REGISTRY_PACKAGE: RELEASE: REPOSITORY_DISPATCH: SCHEDULE: STATUS: WATCH: WORKFLOW_CALL: WORKFLOW_DISPATCH: WORKFLOW_RUN: """ BRANCH_PROTECTION_RULE = "branch_protection_rule" CHECK_RUN = "check_run" CHECK_SUITE = "check_suite" CREATE = "create" DELETE = "delete" DEPLOYMENT = "deployment" DEPLOYMENT_STATUS = "deployment_status" DISCUSSION = "discussion" DISCUSSION_COMMENT = "discussion_comment" DYNAMIC = "dynamic" FORK = "fork" GOLLUM = "gollum" ISSUE_COMMENT = "issue_comment" ISSUES = "issues" LABEL = "label" MERGE_GROUP = "merge_group" MILESTONE = "milestone" PAGE_BUILD = "page_build" PROJECT = "project" PROJECT_CARD = "project_card" PROJECT_COLUMN = "project_column" PUBLIC = "public" PULL_REQUEST = "pull_request" PULL_REQUEST_COMMENT = "pull_request_comment" PULL_REQUEST_REVIEW = "pull_request_review" PULL_REQUEST_REVIEW_COMMENT = "pull_request_review_comment" PULL_REQUEST_TARGET = "pull_request_target" PUSH = "push" REGISTRY_PACKAGE = "registry_package" RELEASE = "release" REPOSITORY_DISPATCH = "repository_dispatch" SCHEDULE = "schedule" STATUS = "status" WATCH = 
"watch" WORKFLOW_CALL = "workflow_call" WORKFLOW_DISPATCH = "workflow_dispatch" WORKFLOW_RUN = "workflow_run" class SortOrder(StrEnum): """ Sort order: asc or desc Attributes: ASC: Use ascending sort order DESC: Use descending sort order """ ASC = "asc" DESC = "desc" pontos-25.3.2/pontos/github/models/billing.py000066400000000000000000000044511476255566300212350ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from dataclasses import dataclass from typing import Optional from pontos.github.models.base import GitHubModel @dataclass class ActionsMinutesUsedBreakdown(GitHubModel): """ Attributes: UBUNTU: Total minutes used on Ubuntu runner machines MACOS: Total minutes used on macOS runner machines WINDOWS: Total minutes used on Windows runner machines total: Total minutes used on all runner machines """ UBUNTU: Optional[int] = None MACOS: Optional[int] = None WINDOWS: Optional[int] = None total: Optional[int] = None @dataclass class ActionsBilling(GitHubModel): """ Billing Information for using GitHub Actions Attributes: total_minutes_used: The sum of the free and paid GitHub Actions minutes used total_paid_minutes_used: The total paid GitHub Actions minutes used included_minutes: The amount of free GitHub Actions minutes available minutes_used_breakdown: """ total_minutes_used: int total_paid_minutes_used: int included_minutes: int minutes_used_breakdown: ActionsMinutesUsedBreakdown @dataclass class PackagesBilling(GitHubModel): """ Billing Information for using GitHub Packages Attributes: total_gigabytes_bandwidth_used: Sum of the free and paid storage space (GB) for GitHub Packages total_paid_gigabytes_bandwidth_used: Total paid storage space (GB) for GitHub Packages included_gigabytes_bandwidth: Free storage space (GB) for GitHub Packages """ total_gigabytes_bandwidth_used: int total_paid_gigabytes_bandwidth_used: int included_gigabytes_bandwidth: int @dataclass class 
StorageBilling(GitHubModel): """ Billing Information for using GitHub storage Attributes: days_left_in_billing_cycle: Numbers of days left in billing cycle estimated_paid_storage_for_month: Estimated storage space (GB) used in billing cycle estimated_storage_for_month: Estimated sum of free and paid storage space (GB) used in billing cycle """ days_left_in_billing_cycle: int estimated_paid_storage_for_month: int estimated_storage_for_month: int pontos-25.3.2/pontos/github/models/branch.py000066400000000000000000000142301476255566300210460ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass from typing import List, Optional from pontos.github.models.base import App, GitHubModel, Team, User __all__ = ( "BranchProtection", "BranchProtectionFeature", "BypassPullRequestAllowances", "DismissalRestrictions", "Restrictions", "RequiredStatusChecks", "StatusCheck", "RequiredPullRequestReviews", "RequiredStatusChecks", "BypassPullRequestAllowances", "DismissalRestrictions", ) @dataclass class DismissalRestrictions(GitHubModel): """ Settings to only allow specific users, teams and apps to dismiss pull request reviews Attributes: url: URL to the dismissal restrictions users_url: URL to the users of the dismissal restrictions teams_url: URL to the teams of the dismissal restrictions users: List of user allowed to dismiss pull request reviews teams: List of teams allowed to dismiss pull request reviews apps: List of apps allowed to dismiss pull request reviews """ url: str users_url: str teams_url: str users: List[User] teams: List[Team] apps: List[App] @dataclass class BypassPullRequestAllowances(GitHubModel): """ Settings to allow users, teams and apps to bypass pull request reviews Attributes: user: List of user allowed to bypass required pull request reviews teams: List of teams allowed to bypass required pull request reviews apps: List of apps allowed to bypass required 
pull request reviews """ users: List[User] teams: List[Team] apps: List[App] @dataclass class RequiredPullRequestReviews(GitHubModel): """ Requires pull request review settings of a branch protection Attributes: url: URL to the required pull request reviews dismiss_stale_reviews: Dismiss stale reviews require_code_owner_reviews: Require reviews by code owners required_approving_review_count: Number of approvals required require_last_push_approval: Require to approve the last push dismissal_restrictions: Restrictions for who can dismiss pull request reviews bypass_pull_request_allowances: Settings for allowing bypassing the required pull request reviews """ url: str dismiss_stale_reviews: bool require_code_owner_reviews: bool required_approving_review_count: int require_last_push_approval: bool dismissal_restrictions: Optional[DismissalRestrictions] = None bypass_pull_request_allowances: Optional[BypassPullRequestAllowances] = None @dataclass class StatusCheck(GitHubModel): """ Status check Attributes: context: app: App ID as the source of the status check """ context: str app_id: Optional[int] = None @dataclass class RequiredStatusChecks(GitHubModel): """ Required status checks settings of a branch protection Attributes: url: URL to the required status checks strict: True to require status checks to pass before merging checks: List of status checks enforcement_level: Enforcement level of the required status checks """ url: str strict: bool checks: List[StatusCheck] enforcement_level: Optional[str] = None @dataclass class Restrictions(GitHubModel): """ Branch protection push restrictions Attributes: url: URL to the restrictions users_url: URL to the users of the restrictions teams_url: URL to the teams of the restrictions apps_url: URL to the apps of the restrictions users: List of restricted users teams: List of restricted teams apps: List of restricted apps """ url: str users_url: str teams_url: str apps_url: str users: List[User] teams: List[Team] apps: List[App] 
@dataclass
class BranchProtectionFeature(GitHubModel):
    """
    GitHub branch protection feature setting

    Attributes:
        enabled: True if the feature is enabled
        url: REST API URL to change the feature
    """

    enabled: bool
    url: Optional[str] = None


@dataclass
class BranchProtection(GitHubModel):
    """
    GitHub branch protection information

    Attributes:
        url: URL to the branch protection rules
        required_status_checks: Required status check for the matching branches
        required_pull_request_reviews: Required pull request reviews for the
            matching branches.
        restrictions: Restrictions who can push to the matching branches.
        enforce_admins: Enforce the rules also for users in an admin role.
        required_linear_history: Require a linear history before merging.
            Restricts merging if the matching branch is out of date.
        allow_force_pushes: Allow force pushes to the matching branches.
        allow_deletions: Allow to delete the matching branches.
        block_creations: Restrict pushes that create matching branches.
        required_conversation_resolution: Require conversation resolution
            before merging.
        lock_branch: Mark matching branches as read-only. Users cannot push to
            matching branches.
        allow_fork_syncing: Whether users can pull changes from upstream when
            the matching branch is locked.
        required_signatures: Require git commit signatures.
    """

    url: str
    required_status_checks: Optional[RequiredStatusChecks] = None
    required_pull_request_reviews: Optional[RequiredPullRequestReviews] = None
    restrictions: Optional[Restrictions] = None
    enforce_admins: Optional[BranchProtectionFeature] = None
    required_linear_history: Optional[BranchProtectionFeature] = None
    allow_force_pushes: Optional[BranchProtectionFeature] = None
    allow_deletions: Optional[BranchProtectionFeature] = None
    block_creations: Optional[BranchProtectionFeature] = None
    required_conversation_resolution: Optional[BranchProtectionFeature] = None
    lock_branch: Optional[BranchProtectionFeature] = None
    allow_fork_syncing: Optional[BranchProtectionFeature] = None
    required_signatures: Optional[BranchProtectionFeature] = None
@dataclass
class Rule(GitHubModel):
    """
    A rule used to detect the alert

    Attributes:
        name: The name of the rule used to detect the alert
        description: A short description of the rule used to detect the alert
        id: A unique identifier for the rule used to detect the alert
        full_description: A full description of the rule used to detect the
            alert
        severity: The severity of the alert
        security_severity_level: The security severity of the alert
        tags: A set of tags applicable for the rule
        help: Detailed documentation for the rule as GitHub Flavored Markdown
        help_uri: A link to the documentation for the rule used to detect the
            alert
    """

    name: str
    description: str
    id: Optional[str] = None
    full_description: Optional[str] = None
    severity: Optional[Severity] = None
    security_severity_level: Optional[SecuritySeverityLevel] = None
    tags: Optional[list[str]] = None
    help: Optional[str] = None
    help_uri: Optional[str] = None


@dataclass
class Message(GitHubModel):
    """
    Message attached to a code scanning alert instance

    Attributes:
        text: The message text
    """

    text: str


@dataclass
class Location(GitHubModel):
    """
    Describes a region within a file for the alert

    Attributes:
        path: The file path in the repository
        start_line: Line number at which the vulnerable code starts in the
            file
        end_line: Line number at which the vulnerable code ends in the file
        start_column: The column at which the vulnerable code starts within
            the start line
        end_column: The column at which the vulnerable code ends within the
            end line
    """

    path: str
    start_line: int
    end_line: int
    start_column: int
    end_column: int


@dataclass
class Instance(GitHubModel):
    """
    An instance of a code scanning alert

    Attributes:
        ref: The full Git reference, formatted as `refs/heads/`,
            `refs/pull//merge`, or `refs/pull//head`
        analysis_key: Identifies the configuration under which the analysis
            was executed. For example, in GitHub Actions this includes the
            workflow filename and job name
        environment: Identifies the variable values associated with the
            environment in which the analysis that generated this alert
            instance was performed, such as the language that was analyzed
        category: Identifies the configuration under which the analysis was
            executed. Used to distinguish between multiple analyses for the
            same tool and commit, but performed on different languages or
            different parts of the code
        state: State of a code scanning alert
        commit_sha: Git commit ID the alert instance was detected in
        message: Message attached to the alert instance
        location: Describes a region within a file for the alert
        html_url: URL to the web page of the alert instance
        classifications: Classifications that have been applied to the file
            that triggered the alert. For example identifying it as
            documentation, or a generated file
    """

    ref: str
    analysis_key: str
    environment: str
    category: str
    state: AlertState
    commit_sha: str
    message: Message
    location: Location
    html_url: Optional[str] = None
    classifications: Optional[list[Classification]] = None


@dataclass
class Tool(GitHubModel):
    """
    A tool used to generate the code scanning analysis

    Attributes:
        name: The name of the tool used to generate the code scanning
            analysis
        version: The version of the tool used to generate the code scanning
            analysis
        guid: The GUID of the tool used to generate the code scanning
            analysis, if provided in the uploaded SARIF data
    """

    name: str
    version: Optional[str] = None
    guid: Optional[str] = None
dismissed_at: The time that the alert was dismissed dismissed_reason: The reason for dismissing or closing the alert dismissed_comment: The dismissal comment associated with the dismissal of the alert rule: The rule used to detect the alert tool: The tool used to generate the code scanning analysis most_recent_instance: repository: A GitHub repository """ number: int created_at: datetime url: str html_url: str instances_url: str state: AlertState rule: Rule tool: Tool most_recent_instance: Instance repository: Optional[Repository] = None updated_at: Optional[datetime] = None fixed_at: Optional[datetime] = None dismissed_by: Optional[User] = None dismissed_at: Optional[datetime] = None dismissed_reason: Optional[DismissedReason] = None dismissed_comment: Optional[str] = None @dataclass class Analysis(GitHubModel): """ Details for a code scanning analyses Attributes: ref: The full Git reference, formatted as `refs/heads/`, `refs/pull//merge`, or `refs/pull//head` commit_sha: The SHA of the commit to which the analysis you are uploading relates analysis_key: Identifies the configuration under which the analysis was executed. For example, in GitHub Actions this includes the workflow filename and job name environment: Identifies the variable values associated with the environment in which this analysis was performed category: Identifies the configuration under which the analysis was executed. 
Used to distinguish between multiple analyses for the same tool and commit, but performed on different languages or different parts of the code error: Error generated when processing the analysis created_at: The time that the analysis was created results_count: The total number of results in the analysis rules_count: The total number of rules used in the analysis id: Unique identifier for this analysis url: The REST API URL of the analysis resource sarif_id: An identifier for the upload tool: The tool used to generate the code scanning analysis deletable: warning: Warning generated when processing the analysis """ ref: str commit_sha: str analysis_key: str environment: str category: str error: str created_at: datetime results_count: int rules_count: int id: int url: str sarif_id: str tool: Tool deletable: bool warning: str @dataclass class CodeQLDatabase(GitHubModel): """ A CodeQL database Attributes: id: The ID of the CodeQL database name: The name of the CodeQL database language: The language of the CodeQL database uploader: A GitHub user content_type: The MIME type of the CodeQL database file size: The size of the CodeQL database file in bytes created_at: The date and time at which the CodeQL database was created updated_at: The date and time at which the CodeQL database was last updated url: The URL at which to download the CodeQL database commit_oid: The commit SHA of the repository at the time the CodeQL database was created """ id: int name: str language: str uploader: User content_type: str size: int created_at: datetime updated_at: datetime url: str commit_oid: Optional[str] = None class DefaultSetupState(StrEnum): """ State of a default setup """ CONFIGURED = "configured" NOT_CONFIGURED = "not-configured" class Language(StrEnum): """ Analyzed Language """ C_CPP = "c-cpp" CSHARP = "csharp" GO = "go" JAVA_KOTLIN = "java-kotlin" JAVASCRIPT_TYPESCRIPT = "javascript-typescript" JAVASCRIPT = "javascript" PYTHON = "python" RUBY = "ruby" TYPESCRIPT = "typescript" 
class QuerySuite(StrEnum):
    """
    Used code scanning query suite
    """

    DEFAULT = "default"
    EXTENDED = "extended"


@dataclass
class DefaultSetup(GitHubModel):
    """
    Code scanning default setup configuration

    Attributes:
        state: Code scanning default setup has been configured or not
        languages: Languages to be analyzed
        query_suite: CodeQL query suite to be used
        updated_at: Timestamp of latest configuration update
        schedule: The frequency of the periodic analysis
    """

    state: DefaultSetupState
    languages: list[Language]
    query_suite: QuerySuite
    updated_at: Optional[datetime] = None
    schedule: Optional[str] = None


class SarifProcessingStatus(StrEnum):
    """
    `pending` files have not yet been processed, while `complete` means
    results from the SARIF have been stored. `failed` files have either not
    been processed at all, or could only be partially processed
    """

    PENDING = "pending"
    COMPLETE = "complete"
    FAILED = "failed"


@dataclass
class SarifUploadInformation(GitHubModel):
    """
    Information about the SARIF upload

    Attributes:
        processing_status: Status of the SARIF processing
        analyses_url: The REST API URL for getting the analyses associated
            with the upload
        errors: Any errors that occurred during processing of the delivery
    """

    processing_status: SarifProcessingStatus
    analyses_url: Optional[str] = None
    errors: Optional[list[str]] = None
AUTO_DISMISSED = "auto_dismissed" DISMISSED = "dismissed" FIXED = "fixed" OPEN = "open" class DismissedReason(StrEnum): """ Reason phrase for a dismissed Dependabot alert """ FIX_STARTED = "fix_started" INACCURATE = "inaccurate" NO_BANDWIDTH = "no_bandwidth" NOT_USED = "not_used" TOLERABLE_RISK = "tolerable_risk" class DependencyScope(StrEnum): """ The execution scope of the vulnerable dependency """ DEVELOPMENT = "development" RUNTIME = "runtime" class Severity(StrEnum): """ The severity of the vulnerability """ LOW = "low" MEDIUM = "medium" HIGH = "high" CRITICAL = "critical" class IdentifierType(StrEnum): """ The type of advisory identifier """ CVE = "CVE" GHSA = "GHSA" @dataclass class VulnerablePackage(GitHubModel): """ Details about a vulnerable Package Attributes: ecosystem: The package's language or package management ecosystem name: The unique package name within its ecosystem """ ecosystem: str name: str @dataclass class PatchedVersion(GitHubModel): """ Details pertaining to the package version that patches a vulnerability Attributes: identifier: The package version that patches the vulnerability """ identifier: str @dataclass class Vulnerability(GitHubModel): """ Details pertaining to one vulnerable version range for the advisory Attributes: package: Details about the vulnerable package severity: The severity of the vulnerability vulnerable_version_range: Conditions that identify vulnerable versions of this vulnerability's package first_patched_version: The package version that patches this vulnerability """ package: VulnerablePackage severity: Severity vulnerable_version_range: str first_patched_version: Optional[PatchedVersion] = None @dataclass class Dependency(GitHubModel): """ Details for the vulnerable dependency Attributes: package: Details about the vulnerable package manifest_path: The full path to the dependency manifest file, relative to the root of the repository scope: The execution scope of the vulnerable dependency """ package: 
@dataclass
class CVSS(GitHubModel):
    """
    Common Vulnerability Scoring System (CVSS) details of an advisory

    Attributes:
        score: The overall CVSS score of the advisory
        vector_string: The full CVSS vector string for the advisory
    """

    score: float
    vector_string: Optional[str] = None


@dataclass
class CWE(GitHubModel):
    """
    Common Weakness Enumeration (CWE) details of an advisory

    Attributes:
        cwe_id: The unique CWE ID
        name: The short, plain text name of the CWE
    """

    cwe_id: str
    name: str


@dataclass
class Identifier(GitHubModel):
    """
    An advisory identifier

    Attributes:
        type: The type of advisory identifier
        value: The value of the advisory identifier
    """

    type: IdentifierType
    value: str


@dataclass
class Reference(GitHubModel):
    """
    A link to additional advisory information

    Attributes:
        url: URL for the additional advisory information
    """

    url: str
published_at: datetime updated_at: datetime cve_id: Optional[str] = None withdrawn_at: Optional[datetime] = None @dataclass class DependabotAlert(GitHubModel): """ A GitHub dependabot security alert model Attributes: number: The security alert number state: The state of the Dependabot alert dependency: Details for the vulnerable dependency security_advisory: Details for the GitHub Security Advisory security_vulnerability: Details pertaining to one vulnerable version range for the advisory url: The REST API URL of the alert resource html_url: The GitHub URL of the alert resource created_at: The time that the alert was created updated_at: The time that the alert was last updated dismissed_at: The time that the alert was dismissed dismissed_by: User who dismissed the alert dismissed_reason: The reason that the alert was dismissed dismissed_comment: An optional comment associated with the alert's dismissal fixed_at: The time that the alert was no longer detected and was considered fixed auto_dismissed_at: The time that the alert was auto-dismissed repository: The GitHub repository containing the alert. 
It's not returned when requesting a specific alert """ number: int state: AlertState dependency: Dependency security_advisory: SecurityAdvisory security_vulnerability: Vulnerability url: str html_url: str created_at: datetime updated_at: datetime repository: Optional[Repository] = None dismissed_at: Optional[datetime] = None dismissed_by: Optional[User] = None dismissed_reason: Optional[DismissedReason] = None dismissed_comment: Optional[str] = None fixed_at: Optional[datetime] = None auto_dismissed_at: Optional[datetime] = None pontos-25.3.2/pontos/github/models/organization.py000066400000000000000000000375341476255566300223310ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass, field from datetime import datetime from typing import List, Optional from pontos.github.models.base import GitHubModel, User from pontos.models import StrEnum __all__ = ( "CodeOfConduct", "GitIgnoreTemplate", "InvitationRole", "License", "LicenseType", "MemberFilter", "MemberRole", "MergeCommitMessage", "MergeCommitTitle", "Organization", "Repository", "RepositoryPermissions", "RepositoryType", "SecurityAndAnalysis", "SecurityAndAnalysisStatus", "SecurityAndAnalysisType", "SquashMergeCommitMessage", "SquashMergeCommitTitle", ) class MergeCommitTitle(StrEnum): """ Merge commit title Attributes: PR_TITLE: Use pull request title MERGE_MESSAGE: Use provided merge commit message """ PR_TITLE = "PR_TITLE" MERGE_MESSAGE = "MERGE_MESSAGE" class MergeCommitMessage(StrEnum): """ Merge commit message setting Attributes: PR_BODY: Use pull request body PR_TITLE: Use pull request title BLANK: Leave it blank """ PR_BODY = "PR_BODY" PR_TITLE = "PR_TITLE" BLANK = "BLANK" class SquashMergeCommitTitle(StrEnum): """ Squash merge commit title Attributes: PR_TITLE: Use pull request title COMMIT_OR_PR_TITLE: Use pull request or commit title """ PR_TITLE = "PR_TITLE" COMMIT_OR_PR_TITLE = 
"COMMIT_OR_PR_TITLE" class SquashMergeCommitMessage(StrEnum): """ Squash merge commit message setting Attributes: PR_BODY: Use pull request body COMMIT_MESSAGES: Use commit messages BLANK: Leave it blank """ PR_BODY = "PR_BODY" COMMIT_MESSAGES = "COMMIT_MESSAGES" BLANK = "BLANK" class RepositoryType(StrEnum): """ A repository type Attributes: ALL: All repository types PUBLIC: Public repository PRIVATE: Private repository FORKS: Forked repository SOURCES: MEMBER: INTERNAL: """ ALL = "all" PUBLIC = "public" PRIVATE = "private" FORKS = "forks" SOURCES = "sources" MEMBER = "member" INTERNAL = "internal" @dataclass class License(GitHubModel): """ Software License Attributes: key: Key of the license name: Name of the license node_id: Node ID of the license url: URL to the license spdx_id: SPDX ID of the license html_url: URL to the web page of the license """ key: str name: str node_id: str url: Optional[str] = None spdx_id: Optional[str] = None html_url: Optional[str] = None @dataclass class RepositoryPermissions(GitHubModel): """ GitHub repository permissions Attributes: admin: push: pull: maintain: triage: """ admin: bool push: bool pull: bool maintain: Optional[bool] = None triage: Optional[bool] = None @dataclass class Organization(GitHubModel): """ A GitHub organization Attributes: avatar_url: URL to the avatar image events_url: URL to the events followers_url: URL to the followers following_url: URL to users which the organization is following gists_url: URL to gists of the organization html_url: URL to the web page of the organization id: ID of the organization login: Login name of the organization node_id: Node ID of the organization organizations_url: URL to the organization received_events_url: URL to the received events repos_url: URL to the list of repositories site_admin: starred_url: URL to the list of starring users subscriptions_url: type: Type of the organization url: URL to the organization email: Email address gravatar_id: ID of the connected gravatar 
account name: Name of the organization starred_at: """ avatar_url: str events_url: str followers_url: str following_url: str gists_url: str html_url: str id: int login: str node_id: str organizations_url: str received_events_url: str repos_url: str site_admin: bool starred_url: str subscriptions_url: str type: str url: str email: Optional[str] = None gravatar_id: Optional[str] = None name: Optional[str] = None starred_at: Optional[datetime] = None @dataclass class CodeOfConduct(GitHubModel): """ Code of Conduct Attributes: url: URL to the code of conduct key: Key of the code of conduct name: Name of the code of conduct html_url: URL to the web page of the code of conduct """ url: str key: str name: str html_url: str class SecurityAndAnalysisStatus(StrEnum): """ Security and analysis status Attributes: ENABLED: enabled DISABLED: disabled """ ENABLED = "enabled" DISABLED = "disabled" @dataclass class SecurityAndAnalysisType(GitHubModel): """ Security and analysis type Attributes: status: """ status: SecurityAndAnalysisStatus @dataclass class SecurityAndAnalysis(GitHubModel): """ Security and analysis Attributes: advanced_security: Status of GitHub Advanced Security is used dependabot_security_updates: Status of Dependabot security updates are used secret_scanning: Status of Secret Scanning is used secret_scanning_push_protection: Status of Secret Scanning Push Protection is used """ advanced_security: Optional[SecurityAndAnalysisType] = None dependabot_security_updates: Optional[SecurityAndAnalysisType] = None secret_scanning: Optional[SecurityAndAnalysisType] = None secret_scanning_push_protection: Optional[SecurityAndAnalysisType] = None @dataclass class Repository(GitHubModel): """ A GitHub repository model Attributes: archive_url: assignees_url: blobs_url: branches_url: collaborators_url: comments_url: commits_url: compare_url: contents_url: contributors_url: deployments_url: downloads_url: events_url: fork: forks_url: full_name: git_commits_url: git_refs_url: 
git_tags_url: hooks_url: html_url: id: issue_comment_url: issue_events_url: issues_url: keys_url: labels_url: languages_url: merges_url: milestones_url: name: node_id: notifications_url: owner: private: pulls_url: releases_url: stargazers_url: statuses_url: subscribers_url: subscription_url: tags_url: teams_url: trees_url: url: allow_auto_merge: allow_forking: allow_merge_commit: allow_rebase_merge: allow_squash_merge: allow_update_branch: anonymous_access_enabled: archived: clone_url: code_of_conduct: created_at: default_branch: delete_branch_on_merge: description: disabled: forks_count: forks: git_url: has_discussions: has_downloads: has_issues: has_pages: has_projects: has_wiki: homepage: is_template: language: license: merge_commit_title: merge_commit_message: mirror_url: network_count: open_issues_count: open_issues: organization: permissions: pushed_at: security_and_analysis: size: ssh_url: stargazers_count: subscribers_count: svn_url: squash_merge_commit_message: squash_merge_commit_title: temp_clone_token: topics: updated_at: Last modification date use_squash_pr_title_as_default: visibility: watchers_count: watchers: web_commit_signoff_required: """ archive_url: str assignees_url: str blobs_url: str branches_url: str collaborators_url: str comments_url: str commits_url: str compare_url: str contents_url: str contributors_url: str deployments_url: str downloads_url: str events_url: str fork: bool forks_url: str full_name: str git_commits_url: str git_refs_url: str git_tags_url: str hooks_url: str html_url: str id: int issue_comment_url: str issue_events_url: str issues_url: str keys_url: str labels_url: str languages_url: str merges_url: str milestones_url: str name: str node_id: str notifications_url: str owner: User private: bool pulls_url: str releases_url: str stargazers_url: str statuses_url: str subscribers_url: str subscription_url: str tags_url: str teams_url: str trees_url: str url: str allow_auto_merge: Optional[bool] = None allow_forking: 
Optional[bool] = None allow_merge_commit: Optional[bool] = None allow_rebase_merge: Optional[bool] = None allow_squash_merge: Optional[bool] = None allow_update_branch: Optional[bool] = None anonymous_access_enabled: Optional[bool] = None archived: Optional[bool] = None clone_url: Optional[str] = None code_of_conduct: Optional[CodeOfConduct] = None created_at: Optional[datetime] = None default_branch: Optional[str] = None delete_branch_on_merge: Optional[bool] = None description: Optional[str] = None disabled: Optional[bool] = None forks_count: Optional[int] = None forks: Optional[int] = None git_url: Optional[str] = None has_discussions: Optional[bool] = None has_downloads: Optional[bool] = None has_issues: Optional[bool] = None has_pages: Optional[bool] = None has_projects: Optional[bool] = None has_wiki: Optional[bool] = None homepage: Optional[str] = None is_template: Optional[bool] = None language: Optional[str] = None license: Optional[License] = None merge_commit_title: Optional[MergeCommitTitle] = None merge_commit_message: Optional[MergeCommitMessage] = None mirror_url: Optional[str] = None network_count: Optional[int] = None open_issues_count: Optional[int] = None open_issues: Optional[int] = None organization: Optional[Organization] = None permissions: Optional[RepositoryPermissions] = None pushed_at: Optional[datetime] = None security_and_analysis: Optional[SecurityAndAnalysis] = None size: Optional[int] = None ssh_url: Optional[str] = None stargazers_count: Optional[int] = None subscribers_count: Optional[int] = None svn_url: Optional[str] = None squash_merge_commit_message: Optional[SquashMergeCommitMessage] = None squash_merge_commit_title: Optional[SquashMergeCommitTitle] = None temp_clone_token: Optional[str] = None topics: Optional[List[str]] = field(default_factory=list[str]) updated_at: Optional[datetime] = None use_squash_pr_title_as_default: Optional[bool] = None visibility: Optional[str] = None watchers_count: Optional[int] = None watchers: 
class MemberFilter(StrEnum):
    """
    A member filter

    Attributes:
        TWO_FA_DISABLED: Members with 2 factor authentication disabled
        ALL: All members
    """

    TWO_FA_DISABLED = "2fa_disabled"
    ALL = "all"


class MemberRole(StrEnum):
    """
    A member role

    Attributes:
        ALL: All roles
        ADMIN: Admin only
        MEMBER: Member only
    """

    ALL = "all"
    ADMIN = "admin"
    MEMBER = "member"


class InvitationRole(StrEnum):
    """
    An invitation role

    Attributes:
        ADMIN: Admin only
        DIRECT_MEMBER: Direct member only
        BILLING_MANAGER: Billing manager only
    """

    ADMIN = "admin"
    DIRECT_MEMBER = "direct_member"
    BILLING_MANAGER = "billing_manager"


class GitIgnoreTemplate(StrEnum):
    """
    Just a small part of the available gitignore templates at
    https://github.com/github/gitignore

    Attributes:
        C: Template for C
        CPP: Template for C++
        CMAKE: Template for CMake
        GO: Template for Golang
        JAVA: Template for Java
        MAVEN: Template for maven
        NODE: Template for Nodejs
        PYTHON: Template for Python
        RUST: Template for Rust
    """

    C = "C"
    CPP = "C++"
    CMAKE = "CMake"
    GO = "Go"
    JAVA = "Java"
    MAVEN = "Maven"
    NODE = "Node"
    PYTHON = "Python"
    RUST = "Rust"
GNU_GENERAL_PUBLIC_LICENSE_2_0: gpl-2.0 GNU_GENERAL_PUBLIC_LICENSE_3_0: gpl-3.0 GNU_LESSER_GENERAL_PUBLIC_LICENSE_FAMILY: lgpl GNU_LESSER_GENERAL_PUBLIC_LICENSE_2_1: lgpl-2.1 GNU_LESSER_GENERAL_PUBLIC_LICENSE_3_0: lgpl-3.0 ISC: isc LATEX_PROJECT_PUBLIC_LICENSE_1_3C_L: ppl-1.3c MICROSOFT_PUBLIC_LICENSE: ms-pl MIT: mit MOZILLA_PUBLIC_LICENSE_2_0: mpl-2.0 OPEN_SOFTWARE_LICENSE_3_0: osl-3.0 POSTGRESQL_LICENSE: postgresql SIL_OPEN_FONT_LICENSE_1_1: ofl-1.1 UNIVERSITY_OF_ILLINOIS_NCSA_OPEN_SOURCE_LICENSE: ncsa THE_UNLICENSE: unlicense ZLIB_LICENSE: zlib """ ACADEMIC_FREE_LICENSE_3_0 = "afl-3.0" APACHE_LICENSE_2_0 = "apache-2.0" ARTISTIC_LICENSE_2_0 = "artistic-2.0" BOOST_SOFTWARE_LICENSE_1_0 = "bsl-1.0" BSD_2_CLAUSE_SIMPLIFIED_LICENSE = "bsd-2-clause" BSD_3_CLAUSE_NEW_OR_REVISED_LICENSE = "bsd-3-clause" BSD_3_CLAUSE_CLEAR_LICENSE = "bsd-3-clause-clear" CREATIVE_COMMONS_LICENSE_FAMILY = "cc" CREATIVE_COMMONS_ZERO_1_0_UNIVERSAL = "cc0-1.0" CREATIVE_COMMONS_ATTRIBUTION_4_0 = "cc-by-4.0" CREATIVE_COMMONS_ATTRIBUTION_SHARE_ALIKE_4_0 = "cc-by-sa-4.0" DO_WHAT_THE_F_CK_YOU_WANT_TO_PUBLIC_LICENSE = "wtfpl" EDUCATIONAL_COMMUNITY_LICENSE_2_0 = "ecl-2.0" ECLIPSE_PUBLIC_LICENSE_1_0 = "epl-1.0" ECLIPSE_PUBLIC_LICENSE_2_0 = "epl-2.0" EUROPEAN_UNION_PUBLIC_LICENSE_1_1 = "eupl-1.1" GNU_AFFERO_GENERAL_PUBLIC_LICENSE_3_0 = "agpl-3.0" GNU_GENERAL_PUBLIC_LICENSE_FAMILY = "gpl" GNU_GENERAL_PUBLIC_LICENSE_2_0 = "gpl-2.0" GNU_GENERAL_PUBLIC_LICENSE_3_0 = "gpl-3.0" GNU_LESSER_GENERAL_PUBLIC_LICENSE_FAMILY = "lgpl" GNU_LESSER_GENERAL_PUBLIC_LICENSE_2_1 = "lgpl-2.1" GNU_LESSER_GENERAL_PUBLIC_LICENSE_3_0 = "lgpl-3.0" ISC = "isc" LATEX_PROJECT_PUBLIC_LICENSE_1_3C_L = "ppl-1.3c" MICROSOFT_PUBLIC_LICENSE = "ms-pl" MIT = "mit" MOZILLA_PUBLIC_LICENSE_2_0 = "mpl-2.0" OPEN_SOFTWARE_LICENSE_3_0 = "osl-3.0" POSTGRESQL_LICENSE = "postgresql" SIL_OPEN_FONT_LICENSE_1_1 = "ofl-1.1" UNIVERSITY_OF_ILLINOIS_NCSA_OPEN_SOURCE_LICENSE = "ncsa" THE_UNLICENSE = "unlicense" ZLIB_LICENSE = "zlib" 
class PackageType(StrEnum):
    """
    Type of a GitHub package
    """

    CONTAINER = "container"
    DOCKER = "docker"
    MAVEN = "maven"
    NPM = "npm"
    NUGET = "nuget"
    RUBYGEMS = "rubygems"


class PackageVisibility(StrEnum):
    """
    Visibility of a GitHub package
    """

    PUBLIC = "public"
    PRIVATE = "private"


@dataclass
class Package(GitHubModel):
    """
    A GitHub package

    Attributes:
        id: ID of the package
        name: Name of the package
        package_type: Type of the package
        owner: Owner of the package
        version_count: Number of versions of the package
        visibility: Visibility of the package
        url: REST API URL of the package
        created_at: Creation timestamp string (not parsed to datetime here;
            presumably ISO 8601 — confirm against the GitHub API)
        updated_at: Last modification timestamp string (not parsed to
            datetime here)
        repository: Repository the package belongs to
        html_url: URL to the web page of the package
    """

    id: int
    name: str
    package_type: PackageType
    owner: User
    version_count: int
    visibility: PackageVisibility
    url: str
    created_at: str
    updated_at: str
    repository: Repository
    html_url: str


@dataclass
class Container(GitHubModel):
    """
    Container information of a package version

    Attributes:
        tags: Tags of the container
    """

    tags: list[str] = field(default_factory=list)


@dataclass
class PackageVersionMetadata(GitHubModel):
    """
    Metadata of a package version

    Attributes:
        package_type: Type of the package
        container: Container information of the package version
    """

    package_type: PackageType
    container: Container


@dataclass
class PackageVersion(GitHubModel):
    """
    A version of a GitHub package

    Attributes:
        id: ID of the package version
        name: Name of the package version
        url: REST API URL of the package version
        package_html_url: URL to the web page of the package
        created_at: Creation timestamp string (not parsed to datetime here)
        updated_at: Last modification timestamp string (not parsed to
            datetime here)
        html_url: URL to the web page of the package version
        metadata: Metadata of the package version
    """

    id: int
    name: str
    url: str
    package_html_url: str
    created_at: str
    updated_at: str
    html_url: str
    metadata: PackageVersionMetadata
"PullRequestCommitDetails", "PullRequestCommitParent", "PullRequestCommitUser", "PullRequestCommitVerification", "PullRequestRef", "PullRequestState", "Stats", "Tree", ) @dataclass class PullRequestCommitUser(GitHubModel): """ User information of a commit Attributes: name: Name of the user email: Email address of the user date: """ name: str email: str date: datetime @dataclass class Tree(GitHubModel): """ Git tree information Attributes: url: URL to the tree sha: Git ID of the tree """ url: str sha: str @dataclass class PullRequestCommitVerification(GitHubModel): """ Verification details of a pull request commit Attributes: verified: True if the commit is verified reason: Details of the verification signature: Signature of the verification payload: Payload of the verification """ verified: bool reason: str signature: Optional[str] = None payload: Optional[str] = None @dataclass class PullRequestCommitDetails(GitHubModel): """ Detailed information of a pull request commit Attributes: comment_count: Number of comments message: Commit message tree: Commit tree url: URL to the pull request commit verification: Verification details of the pull request commit author: Author of the pull request commit committer: Committer of the pull request commit """ comment_count: int message: str tree: Tree url: str verification: PullRequestCommitVerification author: Optional[PullRequestCommitUser] = None committer: Optional[PullRequestCommitUser] = None @dataclass class PullRequestCommitParent(GitHubModel): """ Pull request parent commit information Attributes: url: URL to the parent commit sha: Git commit ID of the parent commit html_url: URL to the web page of the parent commit """ url: str sha: str html_url: Optional[str] = None @dataclass class Stats(GitHubModel): """ Pull request commit stats Attributes: additions: Number of additions deletions: Number of deletions total: Total number of changes """ additions: int deletions: int total: int @dataclass class 
DiffEntry(GitHubModel): """ Diff information of a pull request commit Attributes: additions: Number of additions blob_url: URL to the binary blob changes: Number of changes contents_url: URL to the contents deletions: Number of deletions filename: Corresponding file name raw_url: URL to the raw content sha: Git commit ID of the change status: File status patch: Patch of the diff previous_filename: Previous file name """ additions: int blob_url: str changes: int contents_url: str deletions: int filename: str raw_url: str sha: str status: FileStatus patch: Optional[str] = None previous_filename: Optional[str] = None @dataclass class PullRequestCommit(GitHubModel): """ Pull request commit Attributes: url: URL to the pull request commit sha: Git commit ID node_id: Node ID of the pull request commit html_url: URL to the web page of the pull request commit comments_url: URL to the pull request comments commit: Git commit object author: Author of the pull request commit stats: File stats of the pull request commit files: Diff information about the files in pull request commit committer: Committer of the pull request parents: List of parent commits """ url: str sha: str node_id: str html_url: str comments_url: str commit: PullRequestCommitDetails author: User stats: Optional[Stats] = None files: List[DiffEntry] = field(default_factory=list) committer: Optional[User] = None parents: List[PullRequestCommitParent] = field(default_factory=list) @dataclass class Label(GitHubModel): """ Pull request label Attributes: id: ID of the label node_id: Node ID of the label url: URL to the label name: Name of the label color: Color code of the label default: True if it is a default label description: Description of the label """ id: int node_id: str url: str name: str color: str default: bool description: Optional[str] = None class MilestoneState(StrEnum): """ State of a pull request milestone (open, closed) Attributes: OPEN: Milestone is open CLOSED: Milestone is closed """ OPEN = 
"open" CLOSED = "closed" @dataclass class Milestone(GitHubModel): """ Pull request milestone Attributes: closed_issues: Number of closed issues created_at: Creation date html_url: URL to the web page of the milestone id: ID of the milestone labels_url: URL to the labels of the milestone node_id: Node ID of the milestone number: Milestone number open_issues: Number of open issues in the milestone state: State of the milestone title: Title of the milestone updated_at: Last modification date url: URL of the milestone closed_at: Closed date creator: Use who created the milestone description: Description of the milestone due_on: Due date of the milestone """ closed_issues: int created_at: datetime html_url: str id: int labels_url: str node_id: str number: int open_issues: int state: MilestoneState title: str updated_at: datetime url: str closed_at: Optional[datetime] = None creator: Optional[User] = None description: Optional[str] = None due_on: Optional[datetime] = None @dataclass class PullRequestRef(GitHubModel): """ Pull request reference Attributes: label: Label of the pull request reference ref: Pull request reference name sha: Git commit ID of pull request reference user: User who created the pull request reference repo: Corresponding repository """ label: str ref: str sha: str user: User repo: Repository class PullRequestState(StrEnum): """ Pull request state Attributes: OPEN: Pull request is open CLOSED: Pull request is closed """ OPEN = "open" CLOSED = "closed" class AuthorAssociation(StrEnum): """ Pull request author association Attributes: COLLABORATOR: Author is a collaborator CONTRIBUTOR: Author is a contributor FIRST_TIMER: First time pull request FIRST_TIME_CONTRIBUTOR: Author is a first time contributor MANNEQUIN: Author is a mannequin MEMBER: Author is a member NONE: None OWNER: Author is owner """ COLLABORATOR = "COLLABORATOR" CONTRIBUTOR = "CONTRIBUTOR" FIRST_TIMER = "FIRST_TIMER" FIRST_TIME_CONTRIBUTOR = "FIRST_TIME_CONTRIBUTOR" MANNEQUIN = 
"MANNEQUIN" MEMBER = "MEMBER" NONE = "NONE" OWNER = "OWNER" @dataclass class Reactions(GitHubModel): """ Reaction Rollup Attributes: url: URL to the reactions total_count: int laugh: int confused: int heart: int hooray: int eyes: int rocket: int """ url: str total_count: int laugh: int confused: int heart: int hooray: int eyes: int rocket: int @dataclass class Comment(GitHubModel): """ A single comment of a pull request/issue Attributes: id: Unique identifier of the comment node_id: Node ID of the comment url: URL for the issue comment body: Contents of the issue comment html_url: URL to the web page of the comment created_at: Creation date updated_at: Last modification date issue_url: URL to the pull request author_association: How the author is associated with the repository user: Author of the comment reactions: Reactions to the comment """ id: int node_id: str url: str html_url: str issue_url: str created_at: datetime updated_at: datetime author_association: AuthorAssociation body: Optional[str] = None user: Optional[User] = None reactions: Optional[Reactions] = None class MergeMethod(StrEnum): """ The (auto) merge method Attributes: MERGE: Create a merge commit SQUASH: Squash commits into a single commit REBASE: Rebase commits onto the target branch """ MERGE = "merge" SQUASH = "squash" REBASE = "rebase" @dataclass class AutoMerge(GitHubModel): """ Auto merge information Attributes: enabled_by: User who enabled the auto merge merge_method: Method that is used for the auto merge commit_title: Commit title of the auto merge commit_message: Commit message of the auto merge """ enabled_by: User merge_method: MergeMethod commit_title: str commit_message: str @dataclass class PullRequest(GitHubModel): """ A GitHub pull request Attributes: additions: Number of changes author_association: Author role base: Reference to the source branch changed_files: Number of changed files comments_url: URL to the pull request comments comments: Number of comments commits_url: URL 
to the pull request commits commits: Number of commits created_at: Creation date deletions: Number of deletions diff_url: URL to the diff view head: Reference to the target branch html_url: URL to the web page of the pull request id: ID of the pull request issue_url: URL to the pull request locked: True if the pull request is locked maintainer_can_modify: True if the maintainer can modify the pull request mergeable_state: Mergeable state merged: True if the pull request is merged node_id: Node ID of the pull request number: Pull request number patch_url: URL to the diff patch review_comment_url: review_comments_url: URL to the reviewer comments review_comments: Number of reviewer comments state: State of the pull request statuses_url: URL of the pull request statuses title: Pull request title updated_at: Last modification date url: URL to the pull request user: User who created the pull request active_lock_reason: Optional[str] = None assignee: Assigned user assignees: List of assigned users auto_merge: True if the pull request should be merged automatically body: Body text of the pull request closed_at: Date when the pull request was closed draft: True if the pull request is a draft labels: List of assigned labels merge_commit_sha: Git commit ID of the merge commit mergeable: True if the pull request is mergeable merged_at: Date when the pull request got merged merged_by: User who merged the pull request milestone: A connected milestone rebaseable: True if the pull request is rebaseable requested_reviewers: List of users requested as reviewers requested_teams: List of teams requested as reviewers """ additions: int author_association: AuthorAssociation base: PullRequestRef changed_files: int comments_url: str comments: int commits_url: str commits: int created_at: datetime deletions: int diff_url: str head: PullRequestRef html_url: str id: int issue_url: str locked: bool maintainer_can_modify: bool mergeable_state: str merged: bool node_id: str number: int 
patch_url: str review_comment_url: str review_comments_url: str review_comments: int state: PullRequestState statuses_url: str title: str updated_at: datetime url: str user: User active_lock_reason: Optional[str] = None assignee: Optional[User] = None assignees: List[User] = field(default_factory=list) auto_merge: Optional[AutoMerge] = None body: Optional[str] = None closed_at: Optional[datetime] = None draft: Optional[bool] = None labels: List[Label] = field(default_factory=list) merge_commit_sha: Optional[str] = None mergeable: Optional[bool] = None merged_at: Optional[datetime] = None merged_by: Optional[User] = None milestone: Optional[Milestone] = None rebaseable: Optional[bool] = None requested_reviewers: List[User] = field(default_factory=list) requested_teams: List[Team] = field(default_factory=list) pontos-25.3.2/pontos/github/models/release.py000066400000000000000000000076431476255566300212430ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass, field from datetime import datetime from typing import List, Optional from pontos.github.models.base import GitHubModel, User from pontos.models import StrEnum __all__ = ( "Release", "ReleaseAsset", "ReleaseAssetState", "ReleaseReactions", ) class ReleaseAssetState(StrEnum): """ State of a release asset Attributes: UPLOADED: Uploaded OPEN: Open """ UPLOADED = "uploaded" OPEN = "open" @dataclass class ReleaseAsset(GitHubModel): """ A GitHub release asset model Attributes: url: URL of the release asset browser_download_url: Direct URL to download the asset from id: ID of the asset node_id: Node ID of the asset name: Name of the asset state: State of the asset content_type: MIME content type of the asset size: Size of the asset download_count: Number of downloads created_at: Creation date updated_at: Upload date label: Label of the asset uploader: User who uploaded the asset """ url: str browser_download_url: 
str id: int node_id: str name: str state: ReleaseAssetState content_type: str size: int download_count: int created_at: datetime updated_at: datetime label: Optional[str] = None uploader: Optional[User] = None @dataclass class ReleaseReactions(GitHubModel): """ Reactions to a GitHub release Attributes: url: URL to the release reactions total_count: Total number of reactions laugh: Number of user reacted with laugh confused: Number of user reacted with confused heart: Number of user reacted with heart hooray: Number of user reacted with hooray eyes: Number of user reacted with eyes rocket: Number of user reacted with rocket """ url: str total_count: int laugh: int confused: int heart: int hooray: int eyes: int rocket: int @dataclass class Release(GitHubModel): """ A GitHub release model Attributes: assets_url: URL to the release assets created_at: Creation Date draft: True if the release is a draft html_url: URL to the web page of the release id: ID of the release node_id: Node ID of the release prerelease: True if the release is a pre release tag_name: Name of the tag referenced by the release target_commitish: Git commit ID of the tag references by the release upload_url: URL to upload release assets to url: URL of the release assets: Information about the release assets author: User created the release body_html: Body of the release as HTML body_text: Body of the release as text body: Body of the release discussion_url: URL to the release discussion mentions_count: name: Name of the release published_at: Publication date of the release reactions: Reaction information tarball_url: URL to the tarball archive of the release zipball_url: URL to the zip archive of the release """ assets_url: str created_at: datetime draft: bool html_url: str id: int node_id: str prerelease: bool tag_name: str target_commitish: str upload_url: str url: str assets: List[ReleaseAsset] = field(default_factory=list) author: Optional[User] = None body_html: Optional[str] = None body_text: 
Optional[str] = None body: Optional[str] = None discussion_url: Optional[str] = None mentions_count: Optional[int] = None name: Optional[str] = None published_at: Optional[datetime] = None reactions: Optional[ReleaseReactions] = None tarball_url: Optional[str] = None zipball_url: Optional[str] = None pontos-25.3.2/pontos/github/models/search.py000066400000000000000000000066701476255566300210670ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from abc import ABC from pontos.models import StrEnum from .base import SortOrder __all__ = ( "InDescriptionQualifier", "InNameQualifier", "InReadmeQualifier", "InTopicsQualifier", "IsPrivateQualifier", "IsPublicQualifier", "NotQualifier", "OrganizationQualifier", "Qualifier", "RepositoryQualifier", "RepositorySort", "SortOrder", "UserQualifier", ) class RepositorySort(StrEnum): """ Sort repositories by Attributes: STARS: GitHub starts FORKS: GitHub forks HELP_WANTED_ISSUES: Number of issues with help wanted label UPDATED: Last updated """ STARS = "stars" FORKS = "forks" HELP_WANTED_ISSUES = "help-wanted-issues" UPDATED = "updated" class Qualifier(ABC): """ An abstract base class for search qualifiers Attributes: operator: The search operator term: The search term """ operator: str term: str def __str__(self) -> str: """ """ return f"{self.operator}:{self.term}" class NotQualifier(Qualifier): """ Qualifier for negating another qualifier Example: Exclude a repository from a search .. 
code-block:: python from pontos.github.models import NotQualifier, RepositoryQualifier qualifier = NotQualifier(RepositoryQualifier("foo/bar")) """ def __init__(self, qualifier: Qualifier) -> None: self.qualifier = qualifier def __str__(self) -> str: return f"-{str(self.qualifier)}" class InQualifier(Qualifier): operator = "in" class InNameQualifier(InQualifier): """ Qualifier for searching in repository names """ term = "name" class InDescriptionQualifier(InQualifier): """ Qualifier for searching in repository descriptions """ term = "description" class InTopicsQualifier(InQualifier): """ Qualifier for searching in repository topics """ term = "topics" class InReadmeQualifier(InQualifier): """ Qualifier for searching in repository READMEs """ term = "readme" class RepositoryQualifier(Qualifier): """ Qualifier for searching within a specific repository """ operator = "repo" def __init__(self, repository: str) -> None: """ Search within a repository Args: repository: owner/repo """ self.term = repository class OrganizationQualifier(Qualifier): """ Qualifier for searching within a specific organization """ operator = "org" def __init__(self, organization: str) -> None: """ Search within an organization Args: organization: Name of the organization to search within """ self.term = organization class UserQualifier(Qualifier): """ Qualifier for searching within a specific user space """ operator = "user" def __init__(self, user: str) -> None: """ Search within an user space Args: user: Name of the user """ self.term = user class IsPublicQualifier(Qualifier): """ Qualifier for searching for public repositories """ operator = "is" term = "public" class IsPrivateQualifier(Qualifier): """ Qualifier for searching for private repositories """ operator = "is" term = "private" pontos-25.3.2/pontos/github/models/secret_scanning.py000066400000000000000000000121551476255566300227620ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: 
GPL-3.0-or-later from dataclasses import dataclass from datetime import datetime from typing import Optional, Union from pontos.github.models.base import GitHubModel, User from pontos.github.models.organization import Repository from pontos.models import StrEnum class AlertSort(StrEnum): """ The property by which to sort the alerts """ CREATED = "created" UPDATED = "updated" class AlertState(StrEnum): """ State of the GitHub Secrets Scanning Alert """ OPEN = "open" RESOLVED = "resolved" class Resolution(StrEnum): """ The reason for resolving the alert """ FALSE_POSITIVE = "false_positive" WONT_FIX = "wont_fix" REVOKED = "revoked" USED_IN_TESTS = "used_in_tests" class LocationType(StrEnum): """ Type of location """ COMMIT = "commit" ISSUE_TITLE = "issue_title" ISSUE_BODY = "issue_body" ISSUE_COMMENT = "issue_comment" @dataclass class SecretScanningAlert(GitHubModel): """ A GitHub Secret Scanning Alert Attributes: number: The security alert number url: The REST API URL of the alert resource html_url: The GitHub URL of the alert resource locations_url: The REST API URL of the code locations for this alert state: Sets the state of the secret scanning alert. A `resolution` must be provided when the state is set to `resolved`. created_at: The time that the alert was created updated_at: The time that the alert was last updated resolution: Required when the `state` is `resolved` resolved_at: The time that the alert was resolved resolved_by: A GitHub user who resolved the alert secret_type: The type of secret that secret scanning detected secret_type_display_name: User-friendly name for the detected secret secret: The secret that was detected repository: The GitHub repository containing the alert. 
It's not returned when requesting a specific alert push_protection_bypassed: Whether push protection was bypassed for the detected secret push_protection_bypassed_by: A GitHub user who bypassed the push protection push_protection_bypassed_at: The time that push protection was bypassed resolution_comment: The comment that was optionally added when this alert was closed """ number: int url: str html_url: str locations_url: str state: AlertState secret_type: str secret_type_display_name: str secret: str created_at: datetime repository: Optional[Repository] = None updated_at: Optional[datetime] = None resolution: Optional[Resolution] = None resolved_at: Optional[datetime] = None resolved_by: Optional[User] = None push_protection_bypassed: Optional[bool] = None push_protection_bypassed_by: Optional[User] = None push_protection_bypassed_at: Optional[datetime] = None resolution_comment: Optional[str] = None @dataclass class CommitLocation(GitHubModel): """ Represents a 'commit' secret scanning location type Attributes: path: The file path in the repository start_line: Line number at which the secret starts in the file end_line: Line number at which the secret ends in the file start_column: The column at which the secret starts within the start line end_column: The column at which the secret ends within the end line blob_sha: SHA-1 hash ID of the associated blob blob_url: The API URL to get the associated blob resource commit_sha: SHA-1 hash ID of the associated commit commit_url: The API URL to get the associated commit resource """ path: str start_line: int end_line: int start_column: int end_column: int blob_sha: str blob_url: str commit_sha: str commit_url: str @dataclass class IssueTitleLocation(GitHubModel): """ Represents an 'issue_title' secret scanning location type Attributes: issue_title_url: The API URL to get the issue where the secret was detected """ issue_title_url: str @dataclass class IssueBodyLocation(GitHubModel): """ Represents an 'issue_body' secret 
scanning location type Attributes: issue_body_url: The API URL to get the issue where the secret was detected """ issue_body_url: str @dataclass class IssueCommentLocation(GitHubModel): """ Represents an 'issue_comment' secret scanning location type Attributes: issue_comment_url: he API URL to get the issue comment where the secret was detected """ issue_comment_url: str @dataclass class AlertLocation(GitHubModel): """ Location where the secret was detected Attributes: type: The location type details: Details about the location where the secret was detected """ type: LocationType details: Union[ CommitLocation, IssueTitleLocation, IssueBodyLocation, IssueCommentLocation, ] pontos-25.3.2/pontos/github/models/tag.py000066400000000000000000000076451476255566300204000ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass from datetime import datetime from typing import Optional from pontos.github.models.base import GitHubModel from pontos.models import StrEnum __all__ = ( "GitObjectType", "Tag", "Tagger", "Verification", "VerificationReason", ) class GitObjectType(StrEnum): """ A git object type Attributes: COMMIT: A commit object TREE: A tree object BLOB: A blob object """ COMMIT = "commit" TREE = "tree" BLOB = "blob" @dataclass class GitObject(GitHubModel): """ A GitHub GitObject model Attributes: sha: The sha (git ID) of the object type: Type of the Git object (commit, tree or blob) url: URL to the git object """ sha: str type: GitObjectType url: str @dataclass class Tagger(GitHubModel): """ GitHub user who created a tag Attributes: date: Date of the tag email: Email address of the user name: Name of the user """ date: datetime email: str name: str class VerificationReason(StrEnum): """ Verification reason details Attributes: EXPIRED_KEY: Signature key has expired NOT_SIGNING_KEY: No signature key available GPGVERIFY_ERROR: GPG verification error 
GPGVERIFY_UNAVAILABLE: GPG verification not available UNSIGNED: Not signed UNKNOWN_SIGNATURE_TYPE: Unknown signature type NO_USER: No user UNVERIFIED_EMAIL: Email address not verified BAD_EMAIL: Bad email address UNKNOWN_KEY: Unknown signature key MALFORMED_SIGNATURE: Malformed signature INVALID: Invalid signature VALID: Valid signature """ EXPIRED_KEY = "expired_key" NOT_SIGNING_KEY = "not_signing_key" GPGVERIFY_ERROR = "gpgverify_error" GPGVERIFY_UNAVAILABLE = "gpgverify_unavailable" UNSIGNED = "unsigned" UNKNOWN_SIGNATURE_TYPE = "unknown_signature_type" NO_USER = "no_user" UNVERIFIED_EMAIL = "unverified_email" BAD_EMAIL = "bad_email" UNKNOWN_KEY = "unknown_key" MALFORMED_SIGNATURE = "malformed_signature" INVALID = "invalid" VALID = "valid" @dataclass class Verification(GitHubModel): """ Verification details of a tag Attributes: verified: True if the tag is verified reason: Details of the reason of the verification status payload: Payload of the verification signature: Signature of the verification """ verified: bool reason: VerificationReason payload: Optional[str] = None signature: Optional[str] = None @dataclass class Tag(GitHubModel): """ A GitHub tag model Attributes: node_id: Node ID of the tag tag: The tag name sha: The corresponding sha (git ID) url: URL to the tag message: The git commit message of the tag tagger: The creator of the tag object: The corresponding git object verification: The verification status of the tag """ node_id: str tag: str sha: str url: str message: str tagger: Tagger object: GitObject verification: Optional[Verification] = None @dataclass class Commit(GitHubModel): """ A GitHub commit model, storing URL and SHA of a commit Attributes: sha: Commits SHA hash url: Commits URL """ sha: str url: str @dataclass class RepositoryTag(GitHubModel): """ A GitHub tag model, when accessing all tags of a repository Attributes: node_id: Node ID of the tag name: The tag name zipball_url: Link to the tags zip ball content tarball_url: Link to the 
tags tar ball content commit: SHA and URL to the commit the tag points to """ node_id: str name: str zipball_url: str tarball_url: str commit: Commit pontos-25.3.2/pontos/github/models/user.py000066400000000000000000000025211476255566300205670ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from dataclasses import dataclass from datetime import datetime from typing import Optional from pontos.github.models.base import GitHubModel @dataclass class SSHPublicKey(GitHubModel): """ A public SSH key of a user Attributes: id: ID of the SSH key key: SSH Key """ id: int key: str @dataclass class SSHPublicKeyExtended(GitHubModel): """ Extended details of public SSH key of a user Attributes: id: ID of the SSH key key: SSH Key url: title: created_at verified: read_only: """ id: int key: str url: str title: str created_at: datetime verified: bool read_only: bool @dataclass class EmailInformation(GitHubModel): """ Information about an email address stored in GitHub Attributes: email: The email address primary: True if it is the primary email address of the user verified: True if the email address is verified visibility: public, private """ email: str primary: bool verified: bool # visibility should be an enum but the schema didn't define the possible # values. 
therefore be safe and just use a str visibility: Optional[str] = None pontos-25.3.2/pontos/github/models/workflow.py000066400000000000000000000251741476255566300214740ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass, field from datetime import datetime from typing import Dict, List, Optional from pontos.github.models.base import Event, GitHubModel, User from pontos.models import StrEnum __all__ = ( "Workflow", "WorkflowState", "WorkflowRun", "WorkflowRunCommit", "WorkflowRunCommitUser", "WorkflowRunRepository", "WorkflowRunStatus", "WorkflowRunWorkflow", ) @dataclass class WorkflowRunCommitUser(GitHubModel): """ User information of a workflow run commit Attributes: name: Name of the user email: Email address of the user """ name: str email: str @dataclass class WorkflowRunCommit(GitHubModel): """ GitHub workflow run commit reference Attributes: id: ID of the commit tree_id: Tree ID of the commit message: Message of the commit timestamp: Timestamp of the commit author: Author of the commit committer: Committer of the commit """ id: str tree_id: str message: str timestamp: datetime author: Optional[WorkflowRunCommitUser] = None committer: Optional[WorkflowRunCommitUser] = None class WorkflowState(StrEnum): """ State of a workflow Attributes: ACTIVE: Workflow is active DELETED: Workflow is deleted DISABLED_FORK: Workflow is disabled because it is run from a fork DISABLED_INACTIVITY: Workflow is disabled because if inactivity DISABLED_MANUALLY: Workflow is disabled manually """ ACTIVE = "active" DELETED = "deleted" DISABLED_FORK = "disabled_fork" DISABLED_INACTIVITY = "disabled_inactivity" DISABLED_MANUALLY = "disabled_manually" @dataclass class Workflow(GitHubModel): """ GitHub workflow Attributes: id: ID of the workflow node_id: Node ID of the workflow name: Name of the workflow path: Path of the workflow file state: State of the workflow created_at: 
Creation date updated_at: Last modification date url: URL to the workflow html_url: URL to the web page of the workflow badge_url: URL to the workflow status badge deleted_at: Deletion date of the workflow """ id: int node_id: str name: str path: str state: WorkflowState created_at: datetime updated_at: datetime url: str html_url: str badge_url: str deleted_at: Optional[datetime] = None @dataclass class WorkflowRunRepository(GitHubModel): """ GitHub workflow run repository Attributes: id: ID of the repository url: URL to the repository name: Name of the repository node_id: Node ID of the repository full_name: Full name of the repository owner: Owner of the repository private: True if the repository is private html_url: URL to the web page of the repository description: Description of the repository fork: True if the repository is a fork archive_url: URL to the archive of the repository assignees_url: URL to the assignees blobs_url: URL to the binary blobs branches_url: URL to the branches collaborators_url: URL to the collaborators comments_url: URL to the comments commits_url: URL to commits compare_url: URL to compare contents_url: URL to the contents contributors_url: URL to the contributors deployments_url: URL to deployments downloads_url: URL to downloads events_url: URL to the events forks_url: URL to the forks of the repository git_commits_url: URL to the commits of the repository git_refs_url: URL to the git refs git_tags_url: URL to the git tags git_url: Git clone URL issue_comment_url: URL to the issue comments issue_events_url: URL to the issue events issues_url: URL to the issues keys_url: URL to the keys labels_url: URL to the labels languages_url: URL to the languages merges_url: URL to the merges milestones_url: URL to the milestones notifications_url: URL to the notifications pulls_url: URL to the pull requests releases_url: URL to releases ssh_url: Git clone URL using ssh stargazers_url: URL to the stargazers statuses_url: URL to the statuses 
subscribers_url: URL to the subscribers subscription_url: URL to subscribe to the repository tags_url: URL to the tags teams_url: URL to the teams trees_url: URL to the trees hooks_url: URL to the hooks """ id: int url: str name: str node_id: str full_name: Optional[str] = None owner: Optional[User] = None private: Optional[bool] = None html_url: Optional[str] = None description: Optional[str] = None fork: Optional[bool] = None archive_url: Optional[str] = None assignees_url: Optional[str] = None blobs_url: Optional[str] = None branches_url: Optional[str] = None collaborators_url: Optional[str] = None comments_url: Optional[str] = None commits_url: Optional[str] = None compare_url: Optional[str] = None contents_url: Optional[str] = None contributors_url: Optional[str] = None deployments_url: Optional[str] = None downloads_url: Optional[str] = None events_url: Optional[str] = None forks_url: Optional[str] = None git_commits_url: Optional[str] = None git_refs_url: Optional[str] = None git_tags_url: Optional[str] = None git_url: Optional[str] = None issue_comment_url: Optional[str] = None issue_events_url: Optional[str] = None issues_url: Optional[str] = None keys_url: Optional[str] = None labels_url: Optional[str] = None languages_url: Optional[str] = None merges_url: Optional[str] = None milestones_url: Optional[str] = None notifications_url: Optional[str] = None pulls_url: Optional[str] = None releases_url: Optional[str] = None ssh_url: Optional[str] = None stargazers_url: Optional[str] = None statuses_url: Optional[str] = None subscribers_url: Optional[str] = None subscription_url: Optional[str] = None tags_url: Optional[str] = None teams_url: Optional[str] = None trees_url: Optional[str] = None hooks_url: Optional[str] = None class WorkflowRunStatus(StrEnum): """ Status of a workflow run Attributes: ACTION_REQUIRED: Use action is required CANCELLED: The workflow run is canceled COMPLETED: The workflow run is completed FAILURE: The workflow run failed IN_PROGRESS: 
The workflow run is in progress NEUTRAL: Neutral QUEUED: The workflow run is queued REQUESTED: The workflow run is requested SKIPPED: The workflow run is skipped STALE: The workflow run is stale SUCCESS: The workflow run is finished successfully TIMED_OUT: The workflow run has timed out WAITING: The workflow run is waiting PENDING: The workflow run is pending """ ACTION_REQUIRED = "action_required" CANCELLED = "cancelled" COMPLETED = "completed" FAILURE = "failure" IN_PROGRESS = "in_progress" NEUTRAL = "neutral" QUEUED = "queued" REQUESTED = "requested" SKIPPED = "skipped" STALE = "stale" SUCCESS = "success" TIMED_OUT = "timed_out" WAITING = "waiting" PENDING = "pending" # not listed in GitHub docs @dataclass class WorkflowRunWorkflow(GitHubModel): """ GitHub workflow of a workflow run Attributes: path: Path to the workflow file sha: Git commit ID of the workflow file ref: """ path: str sha: str ref: Optional[str] = None @dataclass class WorkflowRun(GitHubModel): """ GitHub workflow run Attributes: artifacts_url: URL to created artifacts within the workflow run cancel_url: URL to cancel the workflow run check_suite_url: URL to the status checks created_at: Creation date of the workflow run event: Event that triggered the workflow run head_repository: head_sha: html_url: URL to the web page of the workflow run id: ID of the workflow run jobs_url: URL to the workflow run jobs logs_url: URL to the workflow run logs node_id: Node ID of the workflow run repository: Corresponding repository of the workflow run rerun_url: URL to rerun the workflow run_number: Number of the run updated_at: Last modification date url: URL to the workflow run workflow_id: ID of the corresponding workflow workflow_url: URL to the corresponding workflow actor: User that runs the workflow check_suite_id: check_suite_node_id: conclusion: Conclusion of the workflow run display_title: Displayed title of the workflow run head_branch: head_commit: head_repository_id: name: Name of the workflow path: 
Path to the workflow file previous_attempt_url: URL to the previous workflow run attempt pull_requests: List of pull requests referenced_workflows: List of references workflows run_attempt: Number of the run attempt run_started_at: Date the run started at status: Status of the workflow run triggering_actor: User who triggered the workflow run """ artifacts_url: str cancel_url: str check_suite_url: str created_at: datetime event: Event head_repository: WorkflowRunRepository head_sha: str html_url: str id: int jobs_url: str logs_url: str node_id: str repository: WorkflowRunRepository rerun_url: str run_number: int updated_at: datetime url: str workflow_id: int workflow_url: str actor: Optional[User] = None check_suite_id: Optional[int] = None check_suite_node_id: Optional[str] = None conclusion: Optional[str] = None display_title: Optional[str] = None head_branch: Optional[str] = None head_commit: Optional[WorkflowRunCommit] = None head_repository_id: Optional[int] = None name: Optional[str] = None path: Optional[str] = None previous_attempt_url: Optional[str] = None pull_requests: List[Dict] = field(default_factory=list) referenced_workflows: List[WorkflowRunWorkflow] = field( default_factory=list ) run_attempt: Optional[int] = None run_started_at: Optional[datetime] = None status: Optional[WorkflowRunStatus] = None triggering_actor: Optional[User] = None pontos-25.3.2/pontos/github/pr_template.md000066400000000000000000000014261476255566300206150ustar00rootroot00000000000000**What**: **Why**: **How**: **Checklist**: - [ ] Tests - [ ] Conventional commit message - [ ] Documentation pontos-25.3.2/pontos/github/script/000077500000000000000000000000001476255566300172605ustar00rootroot00000000000000pontos-25.3.2/pontos/github/script/__init__.py000066400000000000000000000054121476255566300213730ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ Load and run Pontos GitHub Scripts A Pontos 
GitHub Script is a Python module that has a github_script coroutine function and optionally a add_script_arguments function. These functions should have the following signatures: .. code-block:: python async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: def add_script_arguments(parser: ArgumentParser) -> None: Example: .. code-block:: python :caption: Example Python module containing a Pontos GitHub Script def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: repo = await api.repositories.get(args.repository) print(repo.html_url, repo.description) return 0 """ import json import sys from argparse import ArgumentParser import httpx from pontos.errors import PontosError from ._parser import create_parser from .errors import GitHubScriptError from .load import ( load_script, run_add_arguments_function, run_github_script_function, ) __all__ = ( "GitHubScriptError", "load_script", "run_add_arguments_function", "run_github_script_function", ) def main(): """ CLI function to run a Pontos GitHub Script """ parser = create_parser() known_args, _ = parser.parse_known_args() try: with load_script(known_args.script) as module: # using a child parser allows for adding --help including the script # arguments child_parser = ArgumentParser(parents=[parser]) run_add_arguments_function(module, child_parser) args = child_parser.parse_args() token = args.token timeout = args.timeout retval = run_github_script_function(module, token, timeout, args) sys.exit(retval) except KeyboardInterrupt: sys.exit(1) except httpx.HTTPStatusError as e: try: error = e.response.json() message = error.get("message") print( f"Got HTTP status {e.response.status_code} while running " f"script {known_args.script} and doing a {e.request.method} " f"request to {e.request.url}. Error was: {message}. 
", file=sys.stderr, ) except json.JSONDecodeError: # not a json response print(e, file=sys.stderr) except httpx.ResponseNotRead: # a streaming response failed print(e, file=sys.stderr) sys.exit(1) except PontosError as e: print(e, file=sys.stderr) sys.exit(1) if __name__ == "__main__": main() pontos-25.3.2/pontos/github/script/_parser.py000066400000000000000000000016341476255566300212710ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import os from argparse import ArgumentParser import shtab from pontos.github.api.helper import DEFAULT_TIMEOUT GITHUB_TOKEN = "GITHUB_TOKEN" def create_parser() -> ArgumentParser: """ Create a CLI parser for running Pontos GitHub Scripts Returns: A new ArgumentParser instance add the default arguments """ parser = ArgumentParser(add_help=False) shtab.add_argument_to(parser) parser.add_argument( "--token", default=os.environ.get(GITHUB_TOKEN), help="GitHub Token. Defaults to GITHUB_TOKEN environment variable.", ) parser.add_argument( "--timeout", default=DEFAULT_TIMEOUT, help="Timeout in seconds. 
Default: %(default)s.", type=float, ) parser.add_argument("script", help="Script to run") return parser pontos-25.3.2/pontos/github/script/errors.py000066400000000000000000000003301476255566300211420ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from pontos.errors import PontosError class GitHubScriptError(PontosError): """An error with a GitHub script""" pontos-25.3.2/pontos/github/script/load.py000066400000000000000000000113001476255566300205440ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import asyncio import importlib import os from argparse import ArgumentParser, Namespace from contextlib import contextmanager from pathlib import Path from types import ModuleType from typing import Generator, Union from httpx import Timeout from pontos.github.api.api import GitHubAsyncRESTApi from pontos.github.script.errors import GitHubScriptError from pontos.helper import add_sys_path, ensure_unload_module GITHUB_SCRIPT_FUNCTION_NAME = "github_script" GITHUB_SCRIPT_PARSER_FUNCTION_NAME = "add_script_arguments" @contextmanager def load_script( script: Union[str, os.PathLike], ) -> Generator[ModuleType, None, None]: """ A context manager to load a script module. The script is unloaded when the context manager exits. Args: script: Name or path of the script module to load Example: .. 
code-block:: python from pontos.github.script import load_script with load_script("path/to/script.py") as module: module.func() with load_script("some.python.module") as module: module.func() """ path = Path(script) if path.suffix == ".py": module_name = path.stem with ( add_sys_path(path.parent.absolute()), ensure_unload_module(module_name), ): yield importlib.import_module(module_name) else: module_name = str(path) with ensure_unload_module(module_name): yield importlib.import_module(module_name) def run_github_script_function( module: ModuleType, token: str, timeout: float, args: Namespace ) -> int: """ Run a github_script function from a Python module Args: module: Module that the GitHub script function contains token: A GitHub token for authentication timeout: Timeout for the GitHub requests in seconds args: Arguments forwarded to the script function Raises: GitHubScriptError: If the module doesn't have a github_script function or if the github_script function is not an async coroutine. Returns: The return value of the github_script coroutine Example: .. code-block:: python from pontos.github.script import ( load_script, run_github_script_function, ) with load_script("path/to/script.py") as module: return run_github_script_function(module, token, 60.0, args) with load_script("some.python.module") as module: return run_github_script_function(module, token, 60.0, args) """ if not hasattr(module, GITHUB_SCRIPT_FUNCTION_NAME): raise GitHubScriptError( f"{module.__file__} is not a valid Pontos GitHub Script. A " f"{GITHUB_SCRIPT_FUNCTION_NAME} function is missing." ) func = getattr(module, GITHUB_SCRIPT_FUNCTION_NAME) if not asyncio.iscoroutinefunction(func): # it's not async raise GitHubScriptError( f"{module.__file__} is not a valid Pontos GitHub Script. " f"{GITHUB_SCRIPT_FUNCTION_NAME} need to be an async coroutine " "function." 
) async def run_async() -> int: async with GitHubAsyncRESTApi(token, timeout=Timeout(timeout)) as api: return await func(api, args) loop_owner = False try: loop = asyncio.get_running_loop() except RuntimeError: loop_owner = True loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) try: retval = loop.run_until_complete(run_async()) finally: if loop_owner: loop.close() return retval def run_add_arguments_function( module: ModuleType, parser: ArgumentParser ) -> None: """ Run a GitHub script add_script_arguments function (if available in the module). Args: module: Module containing the GitHub script add_script_arguments function parser: An ArgumentParser to add additional CLI arguments Example: .. code-block:: python from argparse import ArgumentParser from pontos.github.script import ( load_script, run_github_script_function, ) parser = ArgumentParser() with load_script("path/to/script.py") as module: run_add_arguments_function(module, parser) with load_script("some.python.module") as module: run_add_arguments_function(module, parser) """ func = getattr(module, GITHUB_SCRIPT_PARSER_FUNCTION_NAME, None) if func: func(parser) pontos-25.3.2/pontos/github/scripts/000077500000000000000000000000001476255566300174435ustar00rootroot00000000000000pontos-25.3.2/pontos/github/scripts/__init__.py000066400000000000000000000001321476255566300215500ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/github/scripts/add-topics.py000066400000000000000000000021311476255566300220410ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script prints out all repositories existing in the space of the given organization """ from argparse import ArgumentParser, Namespace from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("organization", 
help="owner") parser.add_argument("repository", nargs="*", help="repo") parser.add_argument("--topics", nargs="*", help="new topics to set") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: for repository in args.repository: topics = set( await api.repositories.topics(f"{args.organization}/{repository}") ) topics = topics.union(args.topics) print(topics) topics = await api.repositories.update_topics( f"{args.organization}/{repository}", topics ) print(f"{args.organization}/{repository}: ", end="") for topic in topics: print(f"{topic} ", end="") print() return 0 pontos-25.3.2/pontos/github/scripts/artifacts-download.py000066400000000000000000000023631476255566300236060ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script downloads a single artifacts of a given repository """ from argparse import ArgumentParser, Namespace from pathlib import Path from rich.progress import Progress from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository") parser.add_argument("artifact", help="ID of the artifact to download") parser.add_argument( "--file", help="File to write the artifact to. Default: %(default)s", default="out.file", type=Path, ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: with args.file.open("wb") as f, Progress() as rich_progress: task_id = rich_progress.add_task( f"[red]Downloading Artifact {args.artifact}... 
", total=None ) async with api.artifacts.download( args.repository, args.artifact ) as download: async for content, progress in download: rich_progress.advance(task_id, progress or 1) f.write(content) rich_progress.update(task_id, total=1, completed=1) return 0 pontos-25.3.2/pontos/github/scripts/artifacts-sum.py000066400000000000000000000015771476255566300226110ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script prints out all artifacts of a given repository """ from argparse import ArgumentParser, Namespace from rich.console import Console from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: count = 0 expired = 0 size = 0.0 async for artifact in api.artifacts.get_all(args.repository): count += 1 if artifact.expired: expired += 1 continue size += artifact.size_in_bytes / (1024 * 1024) console = Console() console.print(f"{count} artifacts.") console.print(f"{expired} expired.") console.print(f"Size {size:.2f} MiB") return 0 pontos-25.3.2/pontos/github/scripts/artifacts.py000066400000000000000000000023331476255566300217760ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script prints out all artifacts of a given repository """ from argparse import ArgumentParser, Namespace from rich.console import Console from rich.table import Table from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: table = Table() table.add_column("Name") table.add_column("ID") table.add_column("URL") table.add_column("Updated") table.add_column("Expired") table.add_column("Size (in KB)", justify="right") 
count = 0 async for artifact in api.artifacts.get_all(args.repository): table.add_row( artifact.name, str(artifact.id), f"[link={artifact.archive_download_url}]" f"{artifact.archive_download_url}[/link]", str(artifact.updated_at), str(artifact.expired), f"{artifact.size_in_bytes / 1024:.2f}", ) count += 1 console = Console() console.print(table) print(f"{count} artifacts.") return 0 pontos-25.3.2/pontos/github/scripts/branchprotection-check.py000066400000000000000000000015561476255566300244430ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from argparse import ArgumentParser, Namespace from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repo") parser.add_argument("branch") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: # draft script for checking the branch protection branch_protection = await api.branches.protection_rules( args.repo, args.branch ) if branch_protection: print( f"Branch protection enabled for the '{args.branch}' branch of the '{args.repo}' repository." ) return 0 else: print( f"Branch protection NOT enabled for the '{args.branch}' branch of the '{args.repo}' repository." 
) return 1 pontos-25.3.2/pontos/github/scripts/branchprotection.py000066400000000000000000000020001476255566300233510ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from argparse import ArgumentParser, Namespace from pontos.github.api import GitHubAsyncRESTApi from pontos.github.api.branch import update_from_applied_settings def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repo") parser.add_argument("branch") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: # draft script for updating the branch protections branch_protection = await api.branches.protection_rules( args.repo, args.branch ) # switch required signatures enabled/disabled kwargs = update_from_applied_settings( branch_protection, required_signatures=not ( branch_protection.required_signatures and branch_protection.required_signatures.enabled ), ) await api.branches.update_protection_rules( args.repo, args.branch, **kwargs, ) return 0 pontos-25.3.2/pontos/github/scripts/create-repository.py000066400000000000000000000127431476255566300235040ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script creates a new repository with default settings """ import shutil from argparse import ArgumentParser, BooleanOptionalAction, Namespace from typing import Union from pontos.git import Git, MergeStrategy from pontos.github.api import GitHubAsyncRESTApi from pontos.github.api.repositories import GitIgnoreTemplate, LicenseType from pontos.github.models.base import Permission from pontos.github.script.errors import GitHubScriptError from pontos.testing import temp_directory TEMPLATES = { "python": "https://github.com/greenbone/python-project-template.git", "go": "https://github.com/greenbone/go-project-template.git", } GITIGNORE = {"python": GitIgnoreTemplate.PYTHON, "go": GitIgnoreTemplate.GO} def 
license_type(value: Union[str, LicenseType]) -> LicenseType: if isinstance(value, LicenseType): return value return LicenseType(value.lower()) def possible_license_types() -> str: return ", ".join( [ LicenseType.GNU_GENERAL_PUBLIC_LICENSE_2_0.value, LicenseType.GNU_GENERAL_PUBLIC_LICENSE_3_0.value, LicenseType.GNU_AFFERO_GENERAL_PUBLIC_LICENSE_3_0.value, ] ) def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument( "--template", choices=("python", "go"), help="Use template repo as base for the new repository.", ) parser.add_argument( "--team", help="Team that should have admin access to the repository.", ) parser.add_argument( "--license", help=f"License to choose for the repo: {possible_license_types()}. " "Default: %(default)s.", type=license_type, default=LicenseType.GNU_AFFERO_GENERAL_PUBLIC_LICENSE_3_0.value, ) parser.add_argument( "--visibility", choices=("public", "private"), default="private", help="Visibility of the repository. Default: %(default)s.", ) parser.add_argument("--description", help="Description of the repository.") parser.add_argument( "--branch-protection", action=BooleanOptionalAction, default=True, help="Enable/Disable branch protection for the main branch. Default is " "enabled.", ) parser.add_argument("name", help="Repository to create.") parser.add_argument( "organization", nargs="?", default="greenbone", help="Organization to create the repo in. 
Default: %(default)s.", ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: organization = args.organization repository = args.name private = True if args.visibility == "private" else False gitignore_template = GITIGNORE.get(args.template) license_template = args.license description = args.description branch_protection = args.branch_protection if args.team: team = await api.teams.get(organization, args.team) team_id = team.id else: team_id = None team = None with temp_directory() as temp_dir: git = Git() if args.template: git.clone(TEMPLATES[args.template], temp_dir, remote="template") dot_git_dir = temp_dir / ".git" shutil.rmtree(dot_git_dir) git.cwd = temp_dir git.init() git.add(".") git.commit(f"Starting commit from {args.template} template") else: git.cwd = temp_dir git.init() repo = await api.repositories.create( organization, repository, private=private, has_projects=False, has_wiki=False, allow_merge_commit=True, allow_auto_merge=True, allow_rebase_merge=True, allow_squash_merge=True, allow_update_branch=True, delete_branch_on_merge=True, is_template=False, license_template=license_template, description=description, auto_init=True, gitignore_template=gitignore_template, team_id=team_id, ) repo_url = repo.ssh_url if not repo_url: raise GitHubScriptError("No ssh repository URL") git.add_remote("upstream", repo_url) git.fetch("upstream") if args.template: git.rebase("upstream/main", strategy=MergeStrategy.ORT_OURS) else: git.checkout("main", start_point="upstream/main") if team: await api.teams.update_permission( organization, team.slug, repository, Permission.ADMIN ) code_owners_file = temp_dir / ".github" / "CODEOWNERS" code_owners_file.write_text( f"# default reviewers\n*\t@{organization}/{team.slug}\n" ) git.add(code_owners_file) git.commit("Adjust CODEOWNERS file") git.push(remote="upstream", force=True) if branch_protection: await api.branches.update_protection_rules( f"{organization}/{repository}", "main", 
require_branches_to_be_up_to_date=True, require_code_owner_reviews=True, required_approving_review_count=1, required_conversation_resolution=True, dismiss_stale_reviews=True, allow_force_pushes=False, allow_deletions=False, restrictions_users=[], ) return 0 pontos-25.3.2/pontos/github/scripts/delete-package-with-tag.py000066400000000000000000000030151476255566300243710ustar00rootroot00000000000000""" This script delete a package from a repository, if it contains the specified tag. """ from argparse import ArgumentParser, Namespace from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.packages import PackageType def package_type(value: str) -> PackageType: if isinstance(value, PackageType): return value return PackageType(value.lower()) def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("organization", help="organization name") parser.add_argument("package", help="package name") parser.add_argument( "--package-type", type=package_type, help="package type", default=PackageType.CONTAINER, ) parser.add_argument("tag", help="The tag to be deleted.") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: if not await api.packages.exists( organization=args.organization, package_name=args.package, package_type=args.package_type, ): print( f"Package {args.package} does not exist in organization {args.organization}" ) return 1 print(f"Found package {args.package} in organization {args.organization}") await api.packages.delete_package_with_tag( organization=args.organization, package_name=args.package, package_type=args.package_type, tag=args.tag, ) print( f"Deleted tag {args.tag} from package {args.package} in organization {args.organization}" ) return 0 pontos-25.3.2/pontos/github/scripts/enforce-admins.py000066400000000000000000000030711476255566300227100ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script locks a 
branch in a repo of an organization via branch protection """ from argparse import ArgumentParser, BooleanOptionalAction, Namespace from pontos.github.api import GitHubAsyncRESTApi from pontos.github.api.branch import update_from_applied_settings def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository", help="org/repo combination") parser.add_argument("branch", help="branch to use") parser.add_argument( "--allow", action=BooleanOptionalAction, help="Allow/disallow admin users to bypass the branch protection rules", ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: branch_protection = await api.branches.protection_rules( args.repository, args.branch ) if args.allow: enforce_admins = False else: enforce_admins = True kwargs = update_from_applied_settings( branch_protection, enforce_admins=enforce_admins ) await api.branches.update_protection_rules( args.repository, args.branch, **kwargs, ) if args.allow: print( f"Allowed admins to bypass the branch protection rules for branch " f"{args.branch} in {args.repository} now." ) else: print( f"Admin users are not allowed to bypass the branch protection " f"rules for branch {args.branch} in {args.repository} now." 
) return 0 pontos-25.3.2/pontos/github/scripts/find-package-tag.py000066400000000000000000000044731476255566300231070ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script checks wether a package has a specific tag """ from argparse import ArgumentParser, Namespace from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.packages import PackageType def package_type(value: str) -> PackageType: if isinstance(value, PackageType): return value return PackageType(value.lower()) def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("organization", help="organization name") parser.add_argument("package", help="package name") parser.add_argument("tag", help="tag to check") parser.add_argument( "--package-type", type=package_type, help="package type", default=PackageType.CONTAINER, ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: if not await api.packages.exists( organization=args.organization, package_name=args.package, package_type=args.package_type, ): print( f"Package {args.package} does not exist in organization {args.organization}" ) return 1 print(f"Found package {args.package} in organization {args.organization}") async for package in api.packages.package_versions( organization=args.organization, package_name=args.package, package_type=args.package_type, ): print(f"Checking package {args.package} with id {package.id}") if package.metadata.container.tags: if args.tag in package.metadata.container.tags: print( f"Package {args.package} with id {package.id} has tag {args.tag}" ) tags = await api.packages.package_version_tags( args.organization, args.package_type, args.package, package.id, ) print(f"Tags: {tags}") await api.packages.delete_package_version( args.organization, args.package_type, args.package, package.id, ) return 0 print(f"Package {args.package} does not have tag {args.tag}") return 0 
pontos-25.3.2/pontos/github/scripts/lock-branch.py000066400000000000000000000022771476255566300222100ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script locks a branch in a repo of an organization via branch protection """ from argparse import ArgumentParser, BooleanOptionalAction, Namespace from pontos.github.api import GitHubAsyncRESTApi from pontos.github.api.branch import update_from_applied_settings def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository", help="org/repo combination") parser.add_argument("branch", help="branch to lock") parser.add_argument("--lock", action=BooleanOptionalAction, default=True) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: branch_protection = await api.branches.protection_rules( args.repository, args.branch ) kwargs = update_from_applied_settings( branch_protection, lock_branch=args.lock, ) await api.branches.update_protection_rules( args.repository, args.branch, **kwargs, ) if args.lock: print(f"Locked branch {args.branch} in {args.repository}") else: print(f"Unlocked branch {args.branch} in {args.repository}") return 0 pontos-25.3.2/pontos/github/scripts/members.py000066400000000000000000000036461476255566300214600ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script prints out all members existing in the space of the given organization """ from argparse import ArgumentParser, Namespace from typing import Union from rich.console import Console from rich.table import Table from pontos.github.api import GitHubAsyncRESTApi from pontos.github.api.organizations import MemberFilter, MemberRole def member_filter_type(value: Union[str, MemberFilter]) -> MemberFilter: if isinstance(value, MemberFilter): return value return MemberFilter[value.upper()] def member_role_type(value: Union[str, 
MemberRole]) -> MemberRole: if isinstance(value, MemberRole): return value return MemberRole[value.upper()] def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("organization") parser.add_argument( "-f", "--filter", type=member_filter_type, help=f"Filter members. Choices: " f"{', '.join([f.name for f in MemberFilter])}. Default: %(default)s", default=MemberFilter.ALL.name, ) parser.add_argument( "-r", "--role", type=member_role_type, help=f"Show only members in specific role. Choices: " f"{', '.join([f.name for f in MemberRole])}. Default: %(default)s", default=MemberRole.ALL.name, ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: table = Table() table.add_column("Name") table.add_column("URL") member_count = 0 async for user in api.organizations.members( args.organization, member_filter=args.filter, role=args.role ): table.add_row( user.login, f"[link={user.html_url}]{user.html_url}[/link]", ) member_count += 1 console = Console() console.print(table) print(f"{member_count} members.") return 0 pontos-25.3.2/pontos/github/scripts/release-assets-download.py000066400000000000000000000030461476255566300245450ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script downloads a single artifacts of a given repository """ from argparse import ArgumentParser, Namespace from pathlib import Path from rich.progress import Progress from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository") parser.add_argument("tag", help="Release Tag") parser.add_argument( "--file", help="File to write the artifact to. Default: %(default)s", default="out.file", type=Path, ) parser.add_argument( "--type", choices=["zip", "tar"], help="Download release asset type. 
Default: %(default)s", default="tar", ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: with args.file.open("wb") as f, Progress() as rich_progress: task_id = rich_progress.add_task( f"[red]Downloading asset for tag {args.tag} as {args.type}... ", total=None, ) download_api = ( api.releases.download_release_tarball if args.type == "tar" else api.releases.download_release_zip ) async with download_api(args.repository, args.tag) as download: async for content, progress in download: rich_progress.advance(task_id, progress or 1) f.write(content) rich_progress.update(task_id, total=1, completed=1) return 0 pontos-25.3.2/pontos/github/scripts/repositories.py000066400000000000000000000032131476255566300225430ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script prints out all repositories existing in the space of the given organization """ from argparse import ArgumentParser, Namespace from typing import Union from rich.console import Console from rich.table import Table from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.organization import RepositoryType def repository_type(value: Union[str, RepositoryType]) -> RepositoryType: if isinstance(value, RepositoryType): return value return RepositoryType[value.upper()] def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("organization") parser.add_argument( "-t", "--type", type=repository_type, help=f"What type of repositories should be printed? Choices: " f"{', '.join([f.name for f in RepositoryType])}. 
Default: %(default)s", default=RepositoryType.ALL.name, ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: table = Table() table.add_column("Name") table.add_column("Description") table.add_column("URL") table.add_column("Visibility") repo_count = 0 async for repo in api.organizations.get_repositories( args.organization, repository_type=args.type ): table.add_row( repo.name, repo.description, f"[link={repo.html_url}]{repo.html_url}[/link]", repo.visibility, ) repo_count += 1 console = Console() console.print(table) print(f"{repo_count} repositories.") return 0 pontos-25.3.2/pontos/github/scripts/search-repositories.py000066400000000000000000000140031476255566300240050ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script search for repositories and prints out all found repositories """ import csv import sys from abc import ABC, abstractmethod from argparse import ArgumentParser, ArgumentTypeError, Namespace from typing import Collection, Optional, Union from rich.console import Console from rich.table import Table from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.search import ( InDescriptionQualifier, InNameQualifier, InReadmeQualifier, InTopicsQualifier, IsPrivateQualifier, IsPublicQualifier, OrganizationQualifier, Qualifier, RepositorySort, SortOrder, UserQualifier, ) def lower(value: str) -> str: return value.lower() def order_type(value: Union[str, SortOrder]) -> SortOrder: if isinstance(value, SortOrder): return value try: return SortOrder[value.upper()] except KeyError: raise ArgumentTypeError(f"Invalid value {value}.") from None def sort_type(value: Union[str, RepositorySort]) -> RepositorySort: if isinstance(value, RepositorySort): return value try: return RepositorySort[value.upper()] except KeyError: raise ArgumentTypeError(f"Invalid value {value}.") from None def add_script_arguments(parser: ArgumentParser) -> None: 
parser.add_argument("terms", nargs="+") repo_space_group = parser.add_mutually_exclusive_group(required=False) repo_space_group.add_argument( "--organization", help="Restrict the search to the repositories of an organization.", ) repo_space_group.add_argument( "--user", help="Restrict the search to the repositories of a user." ) parser.add_argument( "--in-name", action="store_true", help="Search for terms within the repository name.", ) parser.add_argument( "--in-description", action="store_true", help="Search for terms within the repository description.", ) parser.add_argument( "--in-readme", action="store_true", help="Search for terms within the repository description.", ) parser.add_argument( "--in-topics", action="store_true", help="Search for terms within the repository topics.", ) visibility_group = parser.add_mutually_exclusive_group(required=False) visibility_group.add_argument( "--private", action="store_true", help="Restrict the search to private repositories only.", ) visibility_group.add_argument( "--public", action="store_true", help="Restrict the search to public repositories only.", ) parser.add_argument( "--format", choices=["console", "csv"], default="console", help="Output format. Default: %(default)s", ) parser.add_argument( "--columns", choices=["name", "description", "url", "visibility"], default=["name", "description", "url", "visibility"], type=lower, nargs="*", help="Define columns to print. Default: %(default)s.", ) parser.add_argument( "--order", type=order_type, help="Sort order. Choices: " f"{', '.join([o.name for o in SortOrder])}. Default: %(default)s.", default=SortOrder.DESC.name, ) parser.add_argument( "--sort", type=sort_type, help="Sort order. 
Choices: " f"{', '.join([o.name for o in RepositorySort])}.", ) class Format(ABC): def __init__(self, columns: Collection[str]) -> None: self.columns = columns @abstractmethod def add_row(self, **kwargs: Optional[str]) -> None: pass def finish(self) -> None: pass class CSVFormat(Format): def __init__(self, columns: Collection[str]) -> None: super().__init__(columns) self.csv_writer = csv.DictWriter(sys.stdout, fieldnames=self.columns) def add_row(self, **kwargs: Optional[str]) -> None: row = {} for column in self.columns: row[column] = kwargs[column] self.csv_writer.writerow(row) class ConsoleFormat(Format): def __init__(self, columns: Collection[str]) -> None: super().__init__(columns) self.table = Table() for column in self.columns: self.table.add_column(column) def add_row(self, **kwargs: Optional[str]) -> None: row = [] for column in self.columns: value = kwargs[column] if column == "url": value = f"[link={value}]{value}[/link]" row.append(value) self.table.add_row(*row) def finish(self) -> None: console = Console() console.print(self.table) console.print(f"{self.table.row_count} repositories.", highlight=False) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: if args.format == "console": output: Format = ConsoleFormat(args.columns) else: output = CSVFormat(args.columns) qualifiers: list[Qualifier] = [] if args.public: qualifiers.append(IsPublicQualifier()) if args.private: qualifiers.append(IsPrivateQualifier()) if args.organization: qualifiers.append(OrganizationQualifier(args.organization)) if args.user: qualifiers.append(UserQualifier(args.user)) if args.in_name: qualifiers.append(InNameQualifier()) if args.in_description: qualifiers.append(InDescriptionQualifier()) if args.in_readme: qualifiers.append(InReadmeQualifier()) if args.in_topics: qualifiers.append(InTopicsQualifier()) async for repo in api.search.repositories( qualifiers=qualifiers, keywords=args.terms, order=args.order, sort=args.sort, ): output.add_row( name=repo.name, 
description=repo.description, url=repo.url, visibility=repo.visibility, ) output.finish() return 0 pontos-25.3.2/pontos/github/scripts/team-repositories.py000066400000000000000000000057651476255566300235050ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script adds team(s) to a list of repositories of an organization """ import asyncio from argparse import ArgumentParser, FileType, Namespace from io import TextIOWrapper from typing import Set, Union from httpx import HTTPStatusError from pontos.github.api import GitHubAsyncRESTApi from pontos.github.models.base import Permission def permission_type(value: Union[str, Permission]) -> Permission: return value if isinstance(value, Permission) else Permission[value.upper()] def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument( "--teams", nargs="+", help="Team(s) to give access to the repositories." ) parser.add_argument( "--organization", default="greenbone", help="GitHub Organization to use. Default: %(default)s.", ) parser.add_argument( "--fail-fast", "--failfast", dest="failfast", action="store_true", help="Stop on first error instead of continuing.", ) parser.add_argument( "--permission", type=permission_type, help=f"Permission to grant the team(s) on the repositories. Choices: " f"{', '.join([f.name for f in Permission])}. Default: %(default)s.", default=Permission.PULL.name, ) repo_group = parser.add_mutually_exclusive_group(required=True) repo_group.add_argument( "--repositories", nargs="+", help="List of repositories to give the team(s) access to.", ) repo_group.add_argument( "--repositories-file", dest="file", help="File to read a list of repositories from. The file needs to " "contain one repository per line. 
'-' to read from stdin.", type=FileType("r"), ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: if args.file: file: TextIOWrapper = args.file repositories = [line.strip() for line in file.readlines()] else: repositories = args.repositories tasks = [] for team in args.teams: for repo in repositories: tasks.append( asyncio.create_task( api.teams.add_permission( args.organization, team, repo, args.permission ) ) ) done, pending = await asyncio.wait( tasks, return_when=( asyncio.FIRST_EXCEPTION if args.failfast else asyncio.ALL_COMPLETED ), ) pending: Set[asyncio.Task] # if pending contains tasks an error occurred and fail fast was set. # therefore cancel pending tasks. for task in pending: task.cancel() has_error = False for task in done | pending: try: await task except HTTPStatusError as e: has_error = True print(e) except asyncio.CancelledError: pass return 0 if not has_error else 1 pontos-25.3.2/pontos/github/scripts/teams.py000066400000000000000000000020131476255566300211220ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script prints out all teams existing in the space of the given organization """ from argparse import ArgumentParser, Namespace from rich.console import Console from rich.table import Table from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("organization") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: table = Table() table.add_column("Name") table.add_column("Description") table.add_column("URL") table.add_column("Privacy") count = 0 async for team in api.teams.get_all(args.organization): table.add_row( team.name, team.description, f"[link={team.html_url}]{team.html_url}[/link]", team.privacy.value, ) count += 1 console = Console() console.print(table) print(f"{count} teams.") return 0 
pontos-25.3.2/pontos/github/scripts/topics.py000066400000000000000000000014651476255566300213240ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ This script prints out all repositories existing in the space of the given organization """ from argparse import ArgumentParser, Namespace from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument("repository", help="owner/repo") parser.add_argument("topics", nargs="*", help="new topics to set") async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: if not args.topics: topics = await api.repositories.topics(args.repository) else: topics = await api.repositories.update_topics( args.repository, args.topics ) for topic in topics: print(topic) return 0 pontos-25.3.2/pontos/github/scripts/workflow-runs.py000066400000000000000000000042171476255566300226600ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from argparse import ArgumentParser, Namespace from rich.console import Console from rich.table import Table from pontos.github.api import GitHubAsyncRESTApi def add_script_arguments(parser: ArgumentParser) -> None: parser.add_argument( "--actor", help="Only return workflow runs of this user ID." ) parser.add_argument( "--branch", help="Only return workflow runs for a specific branch." ) parser.add_argument( "--event", help="Only return workflow runs triggered by the event specified. " "For example, `push`, `pull_request` or `issue`.", ) parser.add_argument( "--status", help="Only return workflow runs with the status or conclusion " "specified.", ) parser.add_argument( "--created", help="Only returns workflow runs created within the given date-time " "range.", ) parser.add_argument("repository") parser.add_argument( "workflow", help="Workflow ID or workflow file name. 
For example `main.yml`.", ) async def github_script(api: GitHubAsyncRESTApi, args: Namespace) -> int: table = Table() table.add_column("Name") table.add_column("ID") table.add_column("URL") table.add_column("Branch") table.add_column("Event") table.add_column("Status") table.add_column("Updated") table.add_column("Actor") count = 0 async for run in api.workflows.get_workflow_runs( args.repository, args.workflow, actor=args.actor, branch=args.branch, event=args.event, status=args.status, created=args.created, ): table.add_row( run.name, str(run.id), f"[link={run.html_url}]{run.html_url}[/link]", run.head_branch, run.event.value, run.conclusion, str(run.updated_at), run.actor.login if run.actor else "", ) count += 1 console = Console() console.print(table) print(f"{count} workflow runs.") return 0 pontos-25.3.2/pontos/helper.py000066400000000000000000000335661476255566300163400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import os import re import sys import warnings from contextlib import asynccontextmanager, contextmanager from datetime import timedelta from enum import Enum from functools import wraps from pathlib import Path from types import ModuleType from typing import ( Any, AsyncContextManager, AsyncIterator, Callable, Dict, Generator, Generic, Iterator, Optional, Tuple, Type, TypeVar, Union, ) import httpx from pontos.errors import PontosError from pontos.typing import SupportsStr __all__ = ( "AsyncDownloadProgressIterable", "DownloadProgressIterable", "add_sys_path", "deprecated", "download_async", "download", "ensure_unload_module", "enum_or_value", "parse_timedelta", "snake_case", "unload_module", ) DEFAULT_TIMEOUT = 1000 DEFAULT_CHUNK_SIZE = 4096 async def upload(file_path: Path) -> AsyncIterator[bytes]: with file_path.open("rb") as f: read = f.read(DEFAULT_CHUNK_SIZE) while read: yield read read = f.read(DEFAULT_CHUNK_SIZE) T = TypeVar("T", str, bytes) class 
AsyncDownloadProgressIterable(Generic[T]): """ An async iterator to iterate over a downloadable content and the progress. Example: .. code-block:: python from pontos.helper import AsyncDownloadProgressIterable it = AsyncDownloadProgressIterable(...) async for content, progress in it: file.write(content) print(progress) """ def __init__( self, *, content_iterator: AsyncIterator[T], url: SupportsStr, length: Optional[int], ): """ Create a new AsyncDownloadProgressIterable instance Args: content_iterator: An async iterator to call for getting the content. Should be a stream of bytes or strings. url: The URL where the content gets downloaded. length: Length of the content. """ self._content_iterator: AsyncIterator[T] = content_iterator self._url = str(url) self._length = None if length is None else int(length) @property def length(self) -> Optional[int]: """ Size in bytes of the to be downloaded file or None if the size is not available """ return self._length @property def url(self) -> str: """ The URL where the content gets downloaded from """ return self._url async def _download(self) -> AsyncIterator[Tuple[T, Optional[float]]]: dl = 0 async for content in self._content_iterator: dl += len(content) progress = dl / self._length * 100 if self._length else None yield content, progress def __aiter__(self) -> AsyncIterator[Tuple[T, Optional[float]]]: """ Returns an async iterator yielding a tuple of content and progress. The progress is expressed as percent of the content length. """ return self._download() @asynccontextmanager async def download_async( stream: AsyncContextManager[httpx.Response], *, content_length: Optional[int] = None, chunk_size: int = DEFAULT_CHUNK_SIZE, url: Optional[str] = None, ) -> AsyncIterator[AsyncDownloadProgressIterable[bytes]]: """ An async context manager that returns an AsyncDownloadProgressIterable. It ensures that the stream is closed automatically via the context manager. 
Args: stream: An async context manager providing a streaming response. content_length: Optional length of the content to download. If now provided it is determined from the response if available. chunk_size: Download the content in chunks of this size. url: Use a specific URL. If not set the URL of the response is used. Returns: A context manager containing an AsyncDownloadProgressIterable Raises: HTTPStatusError: If the request was invalid Example: .. code-block:: python import httpx from pontos.helper import download_async client = httpx.AsyncClient(...) stream = client.stream("GET, "https://foo.bar/baz.zip) async with download_async(stream) as download: async for content, progress in download: file.write(content) print(progress) """ async with stream as response: response.raise_for_status() if not content_length: content_length = response.headers.get("content-length") yield AsyncDownloadProgressIterable( url=url if url else response.url, content_iterator=response.aiter_bytes(chunk_size=chunk_size), length=content_length, ) class DownloadProgressIterable: """ An synchronous iterator to iterate over a download progress. Example: .. code-block:: python from pontos.helper import DownloadProgressIterable it = DownloadProgressIterable(...) 
for progress in it: print(progress) """ def __init__( self, *, content_iterator: Iterator[bytes], url: str, destination: Path, length: Optional[int], ) -> None: """ Create a new DownloadProgressIterable instance Args: content_iterator: An iterator of bytes to write to destination path url: A URL where the content will be downloaded from destination: Path to write the downloaded content to length: Length of the content to be downloaded """ self._content_iterator = content_iterator self._url = url self._destination = destination self._length = None if length is None else int(length) @property def length(self) -> Optional[int]: """ Size in bytes of the to be downloaded file or None if the size is not available """ return self._length @property def destination(self) -> Path: """ Destination path of the to be downloaded file """ return self._destination @property def url(self) -> str: return self._url def _download(self) -> Iterator[Optional[float]]: dl = 0 with self._destination.open("wb") as f: for content in self._content_iterator: dl += len(content) f.write(content) yield dl / self._length if self._length else None def __iter__(self) -> Iterator[Optional[float]]: return self._download() def run(self): """ Just run the download without caring about the progress """ try: it = iter(self) while True: next(it) except StopIteration: pass @contextmanager def download( url: str, destination: Optional[Union[Path, str]] = None, *, headers: Optional[Dict[str, Any]] = None, params: Optional[Dict[str, Any]] = None, chunk_size: int = DEFAULT_CHUNK_SIZE, timeout: int = DEFAULT_TIMEOUT, ) -> Generator[DownloadProgressIterable, None, None]: """Download file in url to filename Arguments: url: The url of the file we want to download destination: Path of the file to store the download in. If set it will be derived from the passed URL. 
headers: HTTP headers to use for the download params: HTTP request parameters to use for the download chunk_size: Download file in chunks of this size timeout: Connection timeout Raises: HTTPStatusError: If the request was invalid Returns: A DownloadProgressIterator that yields the progress of the download in percent for each downloaded chunk or None for each chunk if the progress is unknown. Example: .. code-block:: python from pontos.helper import download with download("https://example.com/some/file") as progress_it: for progress in progress_it: print(progress) """ destination = ( Path(url.split("/")[-1]) if not destination else Path(destination) ) with httpx.stream( "GET", url, timeout=timeout, follow_redirects=True, headers=headers, params=params, ) as response: response.raise_for_status() total_length = response.headers.get("content-length") yield DownloadProgressIterable( url=url, content_iterator=response.iter_bytes(chunk_size=chunk_size), destination=destination, length=total_length, ) def deprecated( _func_or_cls: Union[str, Callable, Type, None] = None, *, since: Optional[str] = None, reason: Optional[str] = None, ): """ A decorator to mark functions, classes and methods as deprecated Args: since: An optional version since the referenced item is deprecated. reason: An optional reason why the references item is deprecated. Examples: .. code-block:: python from pontos.helper import deprecated @deprecated def my_function(*args, **kwargs): ... @deprecated("The function is obsolete. Please use my_func instead.") def my_function(*args, **kwargs): ... @deprecated( since="1.2.3", reason="The function is obsolete. Please use my_func instead." ) def my_function(*args, **kwargs): ... @deprecated(reason="The class will be removed in version 3.4.5") class Foo: ... class Foo: @deprecated(since="2.3.4") def bar(self, *args, **kwargs): ... 
""" if isinstance(_func_or_cls, str): reason = _func_or_cls _func_or_cls = None def decorator_repeat(func_or_cls): module = func_or_cls.__module__ name = func_or_cls.__name__ if module == "__main__": msg = f"{name} is deprecated." else: msg = f"{module}.{name} is deprecated." if since: msg += f" It is deprecated since version {since}." if reason: msg += f" {reason}" @wraps(func_or_cls) def wrapper(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning, stacklevel=3) return func_or_cls(*args, **kwargs) return wrapper if _func_or_cls is None: return decorator_repeat else: return decorator_repeat(_func_or_cls) @contextmanager def add_sys_path( directory: Union[str, os.PathLike], ) -> Generator[None, None, None]: """ Context Manager to add a directory path to the module search path aka. sys.path. The directory path is removed when the context manager is left. Args: directory: A os.PathLike directory to add to sys.path Example: .. code-block:: python from pontos.helper import add_sys_path with add_sys_path("/tmp/test-modules"): import mymodule """ directory = os.fspath(directory) if sys.path[0] != directory: sys.path.insert(0, directory) try: yield finally: try: sys.path.remove(directory) except ValueError: # directory was not in the path pass def unload_module(module: Union[str, ModuleType]) -> None: """ Unload a Python module Args: module: Module instance or name of the Python module to unload. For example: foo.bar """ name = module.__name__ if isinstance(module, ModuleType) else module if name in sys.modules: del sys.modules[name] @contextmanager def ensure_unload_module( module: Union[str, ModuleType], ) -> Generator[None, None, None]: """ A context manager to ensure that a module gets removed even if an error occurs Args: module: Module instance or name of the Python module to unload. For example: foo.bar Example: .. 
code-block:: python from pontos.helper import ensure_unload_module with ensure_unload_module("foo.bar"): do_something() """ try: yield finally: unload_module(module) def snake_case(value: str) -> str: """ Convert a string to snake case/underscore naming scheme Args: value: String to convert into snake case Example: .. code-block:: python from pontos.helper import snake_case snake_case("CamelCase") will return "camel_case" """ s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", value) return re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1).lower() def enum_or_value(value: Union[Enum, Any]) -> Any: """ Return the value of an Enum or the value if it isn't an Enum """ if isinstance(value, Enum): return value.value return value regex = re.compile( r"^((?P[\.\d]+?)w)?((?P[\.\d]+?)d)?((?P[\.\d]+?)h)?" r"((?P[\.\d]+?)m)?((?P[\.\d]+?)s)?$" ) def parse_timedelta(time_str: str) -> timedelta: """ Parse a timedelta from a string Examples: .. code-block:: python from pontos.helper import parse_timedelta parse_timedelta("1.5h") parse_timedelta("1w2d4h5m6s") """ time_match = regex.match(time_str) if not time_match: raise PontosError(f"Invalid timedelta format '{time_str}'.") parts = time_match.groupdict() time_params = {} for name, param in parts.items(): if param: time_params[name] = float(param) return timedelta(**time_params) pontos-25.3.2/pontos/models/000077500000000000000000000000001476255566300157555ustar00rootroot00000000000000pontos-25.3.2/pontos/models/__init__.py000066400000000000000000000124161476255566300200720ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass from datetime import date, datetime, timezone from inspect import isclass from typing import Any, Dict, Type, Union, get_args, get_origin, get_type_hints from dateutil import parser as dateparser from pontos.enum import StrEnum from pontos.errors import PontosError __all__ = ( "Model", "ModelError", "StrEnum", 
"dotted_attributes", ) class ModelError(PontosError): """ Errors raised for Models """ def dotted_attributes(obj: Any, data: Dict[str, Any]) -> Any: """ Set dotted attributes on an object Example: .. code-block:: python class Foo: '''Some class''' foo = Foo() attrs = {"bar": 123, "baz": 456} foo = dotted_attributes(foo, attrs) print(foo.bar, foo.baz) """ for key, value in data.items(): if isinstance(value, dict): default = None if hasattr(obj, key) else ModelAttribute() prop = getattr(obj, key, default) value = dotted_attributes(prop, value) # noqa: PLW2901 setattr(obj, key, value) return obj class ModelAttribute: """ A utility class to allow setting attributes """ def _get_value_from_model_field_cls( model_field_cls: Type[Any], value: Any ) -> Any: if isclass(model_field_cls) and issubclass(model_field_cls, Model): value = model_field_cls.from_dict(value) elif isclass(model_field_cls) and issubclass(model_field_cls, datetime): # Only Python 3.11 supports sufficient formats in # datetime.fromisoformat. Therefore we have to use dateutil here. value = dateparser.isoparse(value) # the iso format may not contain UTC data or a UTC offset # this means it is considered local time (Python calls this "naive" # datetime) and can't really be compared to other times. # Let's UTC in these cases: if not value.tzinfo: value = value.replace(tzinfo=timezone.utc) elif isclass(model_field_cls) and issubclass(model_field_cls, date): value = date.fromisoformat(value) elif get_origin(model_field_cls) is list: model_field_cls = get_args(model_field_cls)[0] value = _get_value_from_model_field_cls(model_field_cls, value) elif get_origin(model_field_cls) is dict: model_field_cls = dict value = _get_value_from_model_field_cls(model_field_cls, value) elif get_origin(model_field_cls) is Union: possible_types = get_args(model_field_cls) current_type = type(value) if current_type in possible_types: model_field_cls = current_type else: # currently Unions should not contain Models. 
this would require # to iterate over the possible type, check if it is a Model # class and try to create an instance of this class until it # fits. For now just fallback to first type model_field_cls = possible_types[0] value = _get_value_from_model_field_cls(model_field_cls, value) else: if isinstance(value, dict): value = model_field_cls(**value) else: value = model_field_cls(value) return value def _get_value(model_field_cls: Type[Any], value: Any) -> Any: if model_field_cls: value = _get_value_from_model_field_cls(model_field_cls, value) return value @dataclass(init=False) class Model: """ Base class for models """ @classmethod def from_dict(cls, data: Dict[str, Any]): """ Create a model from a dict Example: .. code-block:: python model = Model.from_dict({ "id": 123, "node_id": "abcde", "created_at": "2017-07-08T16:18:44-04:00", "updated_at": "2017-07-08T16:18:44-04:00", }) """ if not isinstance(data, dict): raise ValueError( f"Invalid data for creating an instance of {cls.__name__} " f"model. Data is {data!r}" ) kwargs = {} additional_attrs = {} type_hints = get_type_hints(cls) for name, value in data.items(): try: if isinstance(value, list): model_field_cls = type_hints.get(name) value = [_get_value(model_field_cls, v) for v in value] # type: ignore # pylint: disable=line-too-long # noqa: E501,PLW2901 elif value is not None: model_field_cls = type_hints.get(name) value = _get_value(model_field_cls, value) # type: ignore # pylint: disable=line-too-long # noqa: E501,PLW2901 except (ValueError, TypeError) as e: raise ModelError( f"Error while creating {cls.__name__} model. Could not set " f"value for property '{name}' from '{value}'." 
) from e if name in type_hints: kwargs[name] = value else: additional_attrs[name] = value instance = cls(**kwargs) dotted_attributes(instance, additional_attrs) return instance pontos-25.3.2/pontos/nvd/000077500000000000000000000000001476255566300152615ustar00rootroot00000000000000pontos-25.3.2/pontos/nvd/__init__.py000066400000000000000000000004231476255566300173710ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .api import NVDApi, NVDResults, convert_camel_case, format_date, now __all__ = ( "convert_camel_case", "format_date", "now", "NVDApi", "NVDResults", ) pontos-25.3.2/pontos/nvd/api.py000066400000000000000000000305531476255566300164120ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import asyncio import time from abc import ABC from datetime import datetime, timezone from types import TracebackType from typing import ( Any, AsyncIterable, AsyncIterator, Awaitable, Callable, Dict, Generator, Generic, Iterator, Optional, Sequence, Type, TypeVar, Union, ) from httpx import URL, AsyncClient, Response, Timeout from pontos.errors import PontosError from pontos.helper import snake_case SLEEP_TIMEOUT = 30.0 # in seconds DEFAULT_TIMEOUT = 180.0 # three minutes DEFAULT_TIMEOUT_CONFIG = Timeout(DEFAULT_TIMEOUT) # three minutes RETRY_DELAY = 2.0 # in seconds Headers = Dict[str, str] Params = Dict[str, Union[str, int]] JSON = dict[str, Union[int, str, dict[str, Any]]] __all__ = ( "convert_camel_case", "format_date", "now", "NVDApi", "NVDResults", ) def now() -> datetime: """ Return current datetime with UTC timezone applied """ return datetime.now(tz=timezone.utc) def format_date(date: datetime) -> str: """ Format date matching to NVD api Args: date: Date to format Returns: Formatted date as string """ return date.isoformat(timespec="seconds") def convert_camel_case(dct: Dict[str, Any]) -> Dict[str, Any]: """ 
Convert camel case keys into snake case keys Args: dct: dict to convert Returns: A dict with key names converted to snake case """ converted = {} for key, value in dct.items(): converted[snake_case(key)] = value return converted class NoMoreResults(PontosError): """ Raised if the NVD API has no more results to consume """ class InvalidState(PontosError): """ Raised if the state of the NVD API is invalid """ T = TypeVar("T") result_iterator_func = Callable[[JSON], Iterator[T]] class NVDResults(Generic[T], AsyncIterable[T], Awaitable["NVDResults"]): """ A generic object for accessing the results of a NVD API response It implements the pagination and will issue requests against the NVD API. """ def __init__( self, api: "NVDApi", params: Params, result_func: result_iterator_func, *, request_results: Optional[int] = None, results_per_page: Optional[int] = None, start_index: int = 0, ) -> None: self._api = api self._params = params self._url: Optional[URL] = None self._data: Optional[JSON] = None self._it: Optional[Iterator[T]] = None self._total_results: Optional[int] = None self._downloaded_results: int = 0 self._start_index = start_index self._request_results = request_results self._results_per_page = results_per_page self._current_index = start_index self._current_request_results = request_results self._current_results_per_page = results_per_page self._result_func = result_func async def chunks(self) -> AsyncIterator[Sequence[T]]: """ Return the results in chunks The size of the chunks is defined by results_per_page. Examples: .. code-block:: python nvd_results: NVDResults = ... async for results in nvd_results.chunks(): for result in results: print(result) """ while True: try: if self._it: yield list(self._it) await self._next_iterator() except NoMoreResults: return async def items(self) -> AsyncIterator[T]: """ Return the results of the NVD API response Examples: .. code-block:: python nvd_results: NVDResults = ... 
async for result in nvd_results.items(): print(result) """ while True: try: if self._it: for result in self._it: yield result await self._next_iterator() except NoMoreResults: return async def json(self) -> Optional[JSON]: """ Return the result from the NVD API request as JSON Examples: .. code-block:: python nvd_results: NVDResults = ... while data := await nvd_results.json(): print(data) Returns: The response data as JSON or None if the response is exhausted. """ try: if not self._data: await self._next_iterator() data = self._data self._data = None return data except NoMoreResults: return None def __len__(self) -> int: """ Get the number of available result items for a NVD API request Examples: .. code-block:: python nvd_results: NVDResults = ... total_results = len(nvd_results) # None because it hasn't been awaited yet json = await nvd_results.json() # request the plain JSON data total_results = len(nvd_results) # contains the total number of results now nvd_results: NVDResults = ... total_results = len(nvd_results) # None because it hasn't been awaited yet async for result in nvd_results: print(result) total_results = len(nvd_results) # contains the total number of results now Returns: The total number of available results if the NVDResults has been awaited """ if self._total_results is None: raise InvalidState( f"{self.__class__.__name__} has not been awaited yet." ) return self._total_results def __aiter__(self) -> AsyncIterator[T]: """ Return the results of the NVD API response Same as the items() method. @see items() Examples: .. code-block:: python nvd_results: NVDResults = ... async for result in nvd_results: print(result) """ return self.items() def __await__(self) -> Generator[Any, None, "NVDResults"]: """ Request the next results from the NVD API Examples: .. code-block:: python nvd_results: NVDResults = ... 
print(len(nvd_results)) # None, because no request has been send yet await nvd_results # creates a request to the NVD API print(len(nvd_results)) Returns: The response data as JSON or None if the response is exhausted. """ return self._next_iterator().__await__() async def _load_next_data(self) -> None: if ( self._current_request_results is not None and self._downloaded_results >= self._current_request_results ): raise NoMoreResults() if ( self._total_results is not None and self._current_index >= self._total_results ): raise NoMoreResults() params = self._params params["startIndex"] = self._current_index if self._current_results_per_page is not None: params["resultsPerPage"] = self._current_results_per_page response = await self._api._get(params=params) response.raise_for_status() self._url = response.url data: JSON = response.json(object_hook=convert_camel_case) self._data = data self._current_results_per_page = int(data["results_per_page"]) # type: ignore self._total_results = int(data["total_results"]) # type: ignore self._current_index += self._current_results_per_page self._downloaded_results += self._current_results_per_page if not self._current_request_results: self._current_request_results = self._total_results if ( self._request_results and self._downloaded_results + self._current_results_per_page > self._request_results ): # avoid downloading more results then requested self._current_results_per_page = ( self._request_results - self._downloaded_results ) async def _get_next_iterator(self) -> Iterator[T]: await self._load_next_data() return self._result_func(self._data) # type: ignore async def _next_iterator(self) -> "NVDResults": self._it = await self._get_next_iterator() return self def __repr__(self) -> str: return ( f"<{self.__class__.__name__} " f'url="{self._url}" ' f"total_results={self._total_results} " f"start_index={self._start_index} " f"current_index={self._current_index} " f"results_per_page={self._results_per_page}>" ) class NVDApi(ABC): 
""" Abstract base class for querying the NIST NVD API. Should be used as an async context manager. """ def __init__( self, url: str, *, token: Optional[str] = None, timeout: Optional[Timeout] = DEFAULT_TIMEOUT_CONFIG, rate_limit: bool = True, request_attempts: int = 1, ) -> None: """ Create a new instance of the CVE API. Args: url: The API URL to use. token: The API key to use. Using an API key allows to run more requests at the same time. timeout: Timeout settings for the HTTP requests rate_limit: Set to False to ignore rate limits. The public rate limit (without an API key) is 5 requests in a rolling 30 second window. The rate limit with an API key is 50 requests in a rolling 30 second window. See https://nvd.nist.gov/developers/start-here#divRateLimits Default: True. request_attempts: The number of attempts per HTTP request. Defaults to 1. """ self._url = url self._token = token self._client = AsyncClient(http2=True, timeout=timeout) if rate_limit: self._rate_limit: Optional[int] = 50 if token else 5 else: self._rate_limit = None self._request_count = 0 self._last_sleep = time.monotonic() self._request_attempts = request_attempts def _request_headers(self) -> Headers: """ Get the default request headers """ headers = {} if self._token: headers["apiKey"] = self._token return headers async def _consider_rate_limit(self) -> None: """ Apply rate limit if necessary """ if not self._rate_limit: return self._request_count += 1 if self._request_count > self._rate_limit: time_since_last_sleep = time.monotonic() - self._last_sleep if time_since_last_sleep < SLEEP_TIMEOUT: time_to_sleep = SLEEP_TIMEOUT - time_since_last_sleep await asyncio.sleep(time_to_sleep) self._last_sleep = time.monotonic() self._request_count = 0 async def _get( self, *, params: Optional[Params] = None, ) -> Response: """ A request against the NIST NVD REST API. 
""" headers = self._request_headers() for attempt in range(self._request_attempts): if attempt > 0: delay = RETRY_DELAY**attempt await asyncio.sleep(delay) await self._consider_rate_limit() response = await self._client.get( self._url, headers=headers, params=params ) if not response.is_server_error: break return response async def __aenter__(self) -> "NVDApi": # reset rate limit counter self._request_count = 0 await self._client.__aenter__() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: return await self._client.__aexit__( # type: ignore exc_type, exc_value, traceback ) pontos-25.3.2/pontos/nvd/cpe/000077500000000000000000000000001476255566300160305ustar00rootroot00000000000000pontos-25.3.2/pontos/nvd/cpe/__init__.py000066400000000000000000000022421476255566300201410ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import asyncio from argparse import Namespace from typing import Callable import httpx from pontos.nvd.cpe.api import CPEApi from ._parser import cpe_parser, cpes_parser __all__ = ("CPEApi",) async def query_cpe(args: Namespace) -> None: async with CPEApi(token=args.token) as api: cpe = await api.cpe(args.cpe_name_id) print(cpe) async def query_cpes(args: Namespace) -> None: async with CPEApi(token=args.token) as api: response = api.cpes( keywords=args.keywords, cpe_match_string=args.cpe_match_string, request_results=args.number, start_index=args.start, ) async for cpe in response: print(cpe) def cpe_main() -> None: main(cpe_parser(), query_cpe) def cpes_main() -> None: main(cpes_parser(), query_cpes) def main(args: Namespace, func: Callable) -> None: try: asyncio.run(func(args)) except KeyboardInterrupt: pass except httpx.HTTPStatusError as e: print(f"HTTP Error {e.response.status_code}: {e.response.text}") 
pontos-25.3.2/pontos/nvd/cpe/_parser.py000066400000000000000000000024521476255566300200400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from argparse import ArgumentParser, Namespace from typing import Optional, Sequence import shtab def cpes_parser(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument( "--cpe-match-string", help="Search for CPE names that exist in the Official CPE Dictionary.", ) parser.add_argument( "--keywords", nargs="*", help="Search for CPEs containing the keyword in their titles and " "references.", ) parser.add_argument( "--number", "-n", metavar="N", help="Request only N CPEs", type=int ) parser.add_argument( "--start", "-s", help="Index of the first CPE to request.", type=int, ) return parser.parse_args(args) def cpe_parser(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument( "cpe_name_id", metavar="CPE Name ID", help="UUID of the CPE" ) return parser.parse_args(args) pontos-25.3.2/pontos/nvd/cpe/api.py000066400000000000000000000163371476255566300171650ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from datetime import datetime from types import TracebackType from typing import ( Any, Iterator, List, Optional, Type, Union, ) from uuid import UUID from httpx import Timeout from pontos.errors import PontosError from pontos.nvd.api import ( DEFAULT_TIMEOUT_CONFIG, JSON, NVDApi, NVDResults, Params, convert_camel_case, format_date, now, ) from pontos.nvd.models.cpe import CPE DEFAULT_NIST_NVD_CPES_URL = "https://services.nvd.nist.gov/rest/json/cpes/2.0" MAX_CPES_PER_PAGE = 10000 def _result_iterator(data: JSON) -> 
Iterator[CPE]: results: list[dict[str, Any]] = data.get("products", []) # type: ignore return (CPE.from_dict(result["cpe"]) for result in results) class CPEApi(NVDApi): """ API for querying the NIST NVD CPE information. Should be used as an async context manager. Example: .. code-block:: python from pontos.nvd.cpe import CPEApi async with CPEApi() as api: cpe = await api.cpe(...) """ def __init__( self, *, token: Optional[str] = None, timeout: Optional[Timeout] = DEFAULT_TIMEOUT_CONFIG, rate_limit: bool = True, request_attempts: int = 1, ) -> None: """ Create a new instance of the CPE API. Args: token: The API key to use. Using an API key allows to run more requests at the same time. timeout: Timeout settings for the HTTP requests rate_limit: Set to False to ignore rate limits. The public rate limit (without an API key) is 5 requests in a rolling 30 second window. The rate limit with an API key is 50 requests in a rolling 30 second window. See https://nvd.nist.gov/developers/start-here#divRateLimits Default: True. request_attempts: The number of attempts per HTTP request. Defaults to 1. """ super().__init__( DEFAULT_NIST_NVD_CPES_URL, token=token, timeout=timeout, rate_limit=rate_limit, request_attempts=request_attempts, ) async def cpe(self, cpe_name_id: Union[str, UUID]) -> CPE: """ Query for a CPE matching the CPE UUID. Args: cpe_name_id: Returns a specific CPE record identified by a Universal Unique Identifier (UUID). Example: .. code-block:: python from pontos.nvd.cpe import CPEApi async with CPEApi() as api: cpe = await api.cpe("87316812-5F2C-4286-94FE-CC98B9EAEF53") print(cpe) Returns: A single CPE matching the CPE UUID Raises: PontosError: If a CPE with the CPE UUID couldn't be found. 
""" if not cpe_name_id: raise PontosError("Missing CPE Name ID.") response = await self._get(params={"cpeNameId": str(cpe_name_id)}) response.raise_for_status() data = response.json(object_hook=convert_camel_case) products = data["products"] if not products: raise PontosError(f"No CPE with CPE Name ID '{cpe_name_id}' found.") product = products[0] return CPE.from_dict(product["cpe"]) def cpes( self, *, last_modified_start_date: Optional[datetime] = None, last_modified_end_date: Optional[datetime] = None, cpe_match_string: Optional[str] = None, keywords: Optional[Union[List[str], str]] = None, match_criteria_id: Optional[str] = None, request_results: Optional[int] = None, start_index: int = 0, results_per_page: Optional[int] = None, ) -> NVDResults[CPE]: """ Get all CPEs for the provided arguments https://nvd.nist.gov/developers/products Args: last_modified_start_date: Return all CPEs modified after this date. last_modified_end_date: Return all CPEs modified before this date. If last_modified_start_date is set but no last_modified_end_date is passed it is set to now. cpe_match_string: Returns all CPE names that exist in the Official CPE Dictionary. keywords: Returns only the CPEs where a word or phrase is found in the metadata title or reference links. match_criteria_id: Returns all CPE records associated with a match string identified by its UUID. request_results: Number of CPEs to download. Set to None (default) to download all available CPEs. start_index: Index of the first CPE to be returned. Useful only for paginated requests that should not start at the first page. results_per_page: Number of results in a single requests. Mostly useful for paginated requests. Returns: A NVDResponse for CPEs Examples: .. 
code-block:: python from pontos.nvd.cpe import CPEApi async with CPEApi() as api: async for cpe in api.cpes(keywords=["Mac OS X"]): print(cpe.cpe_name, cpe.cpe_name_id) json = await api.cpes(request_results=10).json() async for cpes in api.cpes( cpe_match_string="cpe:2.3:o:microsoft:windows_7:-:*:*:*:*:*:*:*", ).chunks(): for cpe in cpes: print(cpe) """ params: Params = {} if last_modified_start_date: params["lastModStartDate"] = format_date(last_modified_start_date) if not last_modified_end_date: params["lastModEndDate"] = format_date(now()) if last_modified_end_date: params["lastModEndDate"] = format_date(last_modified_end_date) if cpe_match_string: params["cpeMatchString"] = cpe_match_string if keywords: if isinstance(keywords, str): keywords = [keywords] params["keywordSearch"] = " ".join(keywords) if any((" " in keyword for keyword in keywords)): params["keywordExactMatch"] = "" if match_criteria_id: params["matchCriteriaId"] = match_criteria_id results_per_page = min( results_per_page or MAX_CPES_PER_PAGE, request_results or MAX_CPES_PER_PAGE, ) return NVDResults( self, params, _result_iterator, request_results=request_results, results_per_page=results_per_page, start_index=start_index, ) async def __aenter__(self) -> "CPEApi": await super().__aenter__() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: return await super().__aexit__( # type: ignore exc_type, exc_value, traceback ) pontos-25.3.2/pontos/nvd/cpe_match/000077500000000000000000000000001476255566300172045ustar00rootroot00000000000000pontos-25.3.2/pontos/nvd/cpe_match/__init__.py000066400000000000000000000023451476255566300213210ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later import asyncio from argparse import Namespace from typing import Callable import httpx from pontos.nvd.cpe_match.api import 
CPEMatchApi from ._parser import cpe_match_parse, cpe_matches_parse __all__ = ("CPEMatchApi",) async def query_cpe_match(args: Namespace) -> None: async with CPEMatchApi(token=args.token) as api: cpe_match = await api.cpe_match(args.match_criteria_id) print(cpe_match) async def query_cpe_matches(args: Namespace) -> None: async with CPEMatchApi(token=args.token) as api: response = api.cpe_matches( cve_id=args.cve_id, request_results=args.number, start_index=args.start, ) async for cpe_match in response: print(cpe_match) def cpe_match_main() -> None: main(cpe_match_parse(), query_cpe_match) def cpe_matches_main() -> None: main(cpe_matches_parse(), query_cpe_matches) def main(args: Namespace, func: Callable) -> None: try: asyncio.run(func(args)) except KeyboardInterrupt: pass except httpx.HTTPStatusError as e: print(f"HTTP Error {e.response.status_code}: {e.response.text}") pontos-25.3.2/pontos/nvd/cpe_match/_parser.py000066400000000000000000000021551476255566300212140ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from argparse import ArgumentParser, Namespace from typing import Optional, Sequence import shtab def cpe_matches_parse(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument("--cve-id", help="Get matches for a specific CVE") parser.add_argument( "--number", "-n", metavar="N", help="Request only N matches", type=int ) parser.add_argument( "--start", "-s", help="Index of the first match to request.", type=int, ) return parser.parse_args(args) def cpe_match_parse(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument( "--match-criteria-id", help="Get the match string with the given matchCriteriaId ", ) return 
parser.parse_args(args) pontos-25.3.2/pontos/nvd/cpe_match/api.py000066400000000000000000000165121476255566300203340ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from datetime import datetime from types import TracebackType from typing import ( Any, Iterator, Optional, Type, ) from httpx import Timeout from pontos.errors import PontosError from pontos.nvd.api import ( DEFAULT_TIMEOUT_CONFIG, JSON, NVDApi, NVDResults, Params, convert_camel_case, format_date, now, ) from pontos.nvd.models.cpe_match_string import CPEMatchString __all__ = ("CPEMatchApi",) DEFAULT_NIST_NVD_CPE_MATCH_URL = ( "https://services.nvd.nist.gov/rest/json/cpematch/2.0" ) MAX_CPE_MATCHES_PER_PAGE = 500 class CPEMatchApi(NVDApi): """ API for querying the NIST NVD CPE match information. Should be used as an async context manager. Example: .. code-block:: python from pontos.nvd.cpe_match import CPEMatchApi async with CPEMatchApi() as api: cpe = await api.cpe_match_string(...) """ def __init__( self, *, token: Optional[str] = None, timeout: Optional[Timeout] = DEFAULT_TIMEOUT_CONFIG, rate_limit: bool = True, ) -> None: """ Create a new instance of the CPE API. Args: token: The API key to use. Using an API key allows to run more requests at the same time. timeout: Timeout settings for the HTTP requests rate_limit: Set to False to ignore rate limits. The public rate limit (without an API key) is 5 requests in a rolling 30 second window. The rate limit with an API key is 50 requests in a rolling 30 second window. See https://nvd.nist.gov/developers/start-here#divRateLimits Default: True. 
""" super().__init__( DEFAULT_NIST_NVD_CPE_MATCH_URL, token=token, timeout=timeout, rate_limit=rate_limit, ) self._cpe_match_cache: dict[str, Any] = {} def cpe_matches( self, *, last_modified_start_date: Optional[datetime] = None, last_modified_end_date: Optional[datetime] = None, cve_id: Optional[str] = None, match_string_search: Optional[str] = None, request_results: Optional[int] = None, start_index: int = 0, results_per_page: Optional[int] = None, ) -> NVDResults[CPEMatchString]: """ Get all CPE matches for the provided arguments https://nvd.nist.gov/developers/products#divCpeMatch Args: last_modified_start_date: Return all CPE matches last modified after this date. last_modified_end_date: Return all CPE matches last modified before this date. cve_id: Return all CPE matches for this Common Vulnerabilities and Exposures identifier. match_string_search: Return all CPE matches that conform to the given pattern request_results: Number of CPE matches to download. Set to None (default) to download all available matches. start_index: Index of the first CPE match to be returned. Useful only for paginated requests that should not start at the first page. results_per_page: Number of results in a single requests. Mostly useful for paginated requests. Returns: A NVDResponse for CPE matches Example: .. 
code-block:: python from pontos.nvd.cpe_match import CPEMatchApi async with CPEMatchApi() as api: async for match_string in api.matches(cve_id='CVE-2024-1234'): print(match_string) json = api.matches(cve_id='CVE-2024-1234').json() async for match_strings in api.matches( cve_id='CVE-2024-1234', ).chunks(): for match_string in match_strings: print(match_string) """ params: Params = {} if last_modified_start_date: params["lastModStartDate"] = format_date(last_modified_start_date) if not last_modified_end_date: params["lastModEndDate"] = format_date(now()) if last_modified_end_date: params["lastModEndDate"] = format_date(last_modified_end_date) if cve_id: params["cveId"] = cve_id if match_string_search: params["matchStringSearch"] = match_string_search results_per_page = min( results_per_page or MAX_CPE_MATCHES_PER_PAGE, request_results or MAX_CPE_MATCHES_PER_PAGE, ) if start_index is None: start_index = 0 return NVDResults( self, params, self._result_iterator, request_results=request_results, results_per_page=results_per_page, start_index=start_index, ) def _result_iterator(self, data: JSON) -> Iterator[CPEMatchString]: """ Creates an iterator of all the CPEMatchStrings in given API response JSON Args: data: The JSON response data to get the match strings from Returns: An iterator over the CPEMatchStrings """ results: list[dict[str, Any]] = data.get("match_strings", []) # type: ignore return ( CPEMatchString.from_dict_with_cache( result["match_string"], self._cpe_match_cache ) for result in results ) async def cpe_match(self, match_criteria_id: str) -> CPEMatchString: """ Returns a single CPE match for the given match criteria id. Args: match_criteria_id: Match criteria identifier Returns: A CPE match for the given identifier Raises: PontosError: If match criteria ID is empty or if no match with the given ID is found. Example: .. 
code-block:: python from pontos.nvd.cpe_match import CVEApi async with CVEApi() as api: match = await api.cpe_match("36FBCF0F-8CEE-474C-8A04-5075AF53FAF4") print(match) """ if not match_criteria_id: raise PontosError("Missing Match Criteria ID.") response = await self._get( params={"matchCriteriaId": str(match_criteria_id)} ) response.raise_for_status() data = response.json(object_hook=convert_camel_case) match_strings = data["match_strings"] if not match_strings: raise PontosError( f"No match with Match Criteria ID '{match_criteria_id}' found." ) match_string = match_strings[0] return CPEMatchString.from_dict_with_cache( match_string["match_string"], self._cpe_match_cache ) async def __aenter__(self) -> "CPEMatchApi": await super().__aenter__() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: return await super().__aexit__( # type: ignore exc_type, exc_value, traceback ) pontos-25.3.2/pontos/nvd/cve/000077500000000000000000000000001476255566300160365ustar00rootroot00000000000000pontos-25.3.2/pontos/nvd/cve/__init__.py000066400000000000000000000024071476255566300201520ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import asyncio from argparse import Namespace from typing import Callable import httpx from pontos.nvd.cve.api import CVEApi from ._parser import cve_parser, cves_parser __all__ = ("CVEApi",) async def query_cves(args: Namespace) -> None: async with CVEApi(token=args.token) as api: async for cve in api.cves( keywords=args.keywords, cpe_name=args.cpe_name, cvss_v2_vector=args.cvss_v2_vector, cvss_v3_vector=args.cvss_v3_vector, source_identifier=args.source_identifier, request_results=args.number, start_index=args.start, ): print(cve) async def query_cve(args: Namespace) -> None: async with CVEApi(token=args.token) as api: cve = await api.cve(args.cve_id) 
print(cve) def cves_main() -> None: main(cves_parser(), query_cves) def cve_main() -> None: main(cve_parser(), query_cve) def main(args: Namespace, func: Callable) -> None: try: asyncio.run(func(args)) except KeyboardInterrupt: pass except httpx.HTTPStatusError as e: print(f"HTTP Error {e.response.status_code}: {e.response.text}") pontos-25.3.2/pontos/nvd/cve/_parser.py000066400000000000000000000031661476255566300200510ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from argparse import ArgumentParser, Namespace from typing import Optional, Sequence import shtab def cves_parser(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument( "--keywords", nargs="*", help="Search for CVEs containing the keyword in their description.", ) parser.add_argument( "--cpe-name", help="Get all CVE information associated with the CPE" ) parser.add_argument( "--cvss-v2-vector", help="Get all CVE information with the CVSSv2 vector", ) parser.add_argument( "--cvss-v3-vector", help="Get all CVE information with the CVSSv3 vector", ) parser.add_argument( "--source-identifier", help="Get all CVE information with the source identifier. 
For example: " "cve@mitre.org", ) parser.add_argument( "--number", "-n", metavar="N", help="Request only N CVEs", type=int ) parser.add_argument( "--start", "-s", help="Index of the first CVE to request.", type=int, ) return parser.parse_args(args) def cve_parser(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument("cve_id", metavar="CVE-ID", help="ID of the CVE") return parser.parse_args(args) pontos-25.3.2/pontos/nvd/cve/api.py000066400000000000000000000264171476255566300171730ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from datetime import datetime from types import TracebackType from typing import ( Iterable, Iterator, List, Optional, Type, Union, ) from httpx import Timeout from pontos.errors import PontosError from pontos.nvd.api import ( DEFAULT_TIMEOUT_CONFIG, JSON, NVDApi, NVDResults, Params, convert_camel_case, format_date, now, ) from pontos.nvd.models.cve import CVE from pontos.nvd.models.cvss_v2 import Severity as CVSSv2Severity from pontos.nvd.models.cvss_v3 import Severity as CVSSv3Severity __all__ = ("CVEApi",) DEFAULT_NIST_NVD_CVES_URL = "https://services.nvd.nist.gov/rest/json/cves/2.0" MAX_CVES_PER_PAGE = 2000 def _result_iterator(data: JSON) -> Iterator[CVE]: vulnerabilities: Iterable = data.get("vulnerabilities", []) # type: ignore return ( CVE.from_dict(vulnerability["cve"]) for vulnerability in vulnerabilities ) class CVEApi(NVDApi): """ API for querying the NIST NVD CVE information. Should be used as an async context manager. Example: .. 
code-block:: python from pontos.nvd.cve import CVEApi async with CVEApi() as api: cve = await api.cve("CVE-2022-45536") """ def __init__( self, *, token: Optional[str] = None, timeout: Optional[Timeout] = DEFAULT_TIMEOUT_CONFIG, rate_limit: bool = True, request_attempts: int = 1, ) -> None: """ Create a new instance of the CVE API. Args: token: The API key to use. Using an API key allows to run more requests at the same time. timeout: Timeout settings for the HTTP requests rate_limit: Set to False to ignore rate limits. The public rate limit (without an API key) is 5 requests in a rolling 30 second window. The rate limit with an API key is 50 requests in a rolling 30 second window. See https://nvd.nist.gov/developers/start-here#divRateLimits Default: True. request_attempts: The number of attempts per HTTP request. Defaults to 1. """ super().__init__( DEFAULT_NIST_NVD_CVES_URL, token=token, timeout=timeout, rate_limit=rate_limit, request_attempts=request_attempts, ) def cves( self, *, last_modified_start_date: Optional[datetime] = None, last_modified_end_date: Optional[datetime] = None, published_start_date: Optional[datetime] = None, published_end_date: Optional[datetime] = None, cpe_name: Optional[str] = None, is_vulnerable: Optional[bool] = None, cvss_v2_vector: Optional[str] = None, cvss_v2_severity: Optional[CVSSv2Severity] = None, cvss_v3_vector: Optional[str] = None, cvss_v3_severity: Optional[CVSSv3Severity] = None, keywords: Optional[Union[List[str], str]] = None, cwe_id: Optional[str] = None, source_identifier: Optional[str] = None, virtual_match_string: Optional[str] = None, has_cert_alerts: Optional[bool] = None, has_cert_notes: Optional[bool] = None, has_kev: Optional[bool] = None, has_oval: Optional[bool] = None, request_results: Optional[int] = None, start_index: int = 0, results_per_page: Optional[int] = None, ) -> NVDResults[CVE]: """ Get all CVEs for the provided arguments https://nvd.nist.gov/developers/vulnerabilities#divGetCves Args: 
last_modified_start_date: Return all CVEs modified after this date. last_modified_end_date: Return all CVEs modified before this date. If last_modified_start_date is set but no last_modified_end_date is passed it is set to now. published_start_date: Return all CVEs that were added to the NVD (i.e., published) after this date. published_end_date: Return all CVEs that were added to the NVD (i.e., published) before this date. If published_start_date is set but no published_end_date is passed it is set to now. cpe_name: Return all CVEs associated with a specific CPE. The exact value provided with cpe_name is compared against the CPE Match Criteria within a CVE applicability statement. If the value of cpe_name is considered to match, the CVE is included in the results. is_vulnerable: Return only CVEs that match cpe_name that are vulnerable. Requires cpe_name to be set. cvss_v2_vector: Return all CVEs matching this CVSSv2 vector cvss_v2_severity: Return all CVEs matching the CVSSv2 severity cvss_v3_vector: Return all CVEs matching this CVSSv3 vector cvss_v3_severity: Return all CVEs matching the CVSSv3 severity keywords: Returns only the CVEs where a word or phrase is found in the current description. cwe_id: Returns only the CVEs that include a weakness identified by Common Weakness Enumeration using the provided cwe_id. source_identifier: Returns CVEs where the exact value of source_identifier appears as a data source in the CVE record. For example: cve@mitre.org virtual_match_string: Filters CVEs more broadly than cpe_name. The exact value of virtual_match_string is compared against the CPE Match Criteria present on CVE applicability statements. If cpe_name and virtual_match_string are provided only cpe_name is considered. has_cert_alerts: Returns the CVEs that contain a Technical Alert from US-CERT. has_cert_notes: Returns the CVEs that contain a Vulnerability Note from CERT/CC. 
has_kev: Returns the CVE that appear in CISA's Known Exploited Vulnerabilities (KEV) Catalog. has_oval: Returns the CVEs that contain information from MITRE's Open Vulnerability and Assessment Language (OVAL) before this transitioned to the Center for Internet Security (CIS). request_results: Number of CVEs to download. Set to None (default) to download all available CVEs. start_index: Index of the first CVE to be returned. Useful only for paginated requests that should not start at the first page. results_per_page: Number of results in a single requests. Mostly useful for paginated requests. Returns: A NVDResponse for CVEs Examples: .. code-block:: python from pontos.nvd.cve import CVEApi async with CVEApi() as api: async for cve in api.cves(keywords=["Mac OS X", "kernel"]): print(cve.id) json = await api.cves( cpe_name="cpe:2.3:o:microsoft:windows_7:-:*:*:*:*:*:x64:*", ).json() async for cves in api.cves( virtual_match_string="cpe:2.3:o:microsoft:windows_7:-:*:*:*:*:*:x64:*", ).chunks(): for cve in cves: print(cve) """ params: Params = {} if last_modified_start_date: params["lastModStartDate"] = format_date(last_modified_start_date) if not last_modified_end_date: params["lastModEndDate"] = format_date(now()) if last_modified_end_date: params["lastModEndDate"] = format_date(last_modified_end_date) if published_start_date: params["pubStartDate"] = format_date(published_start_date) if not published_end_date: params["pubEndDate"] = format_date(now()) if published_end_date: params["pubEndDate"] = format_date(published_end_date) if cpe_name: params["cpeName"] = cpe_name if is_vulnerable: params["isVulnerable"] = "" if cvss_v2_vector: params["cvssV2Metrics"] = cvss_v2_vector if cvss_v3_vector: params["cvssV3Metrics"] = cvss_v3_vector if cvss_v2_severity: params["cvssV2Severity"] = cvss_v2_severity.value if cvss_v3_severity: params["cvssV3Severity"] = cvss_v3_severity.value if keywords: if isinstance(keywords, str): keywords = [keywords] params["keywordSearch"] = " 
".join(keywords) if any((" " in keyword for keyword in keywords)): params["keywordExactMatch"] = "" if cwe_id: params["cweId"] = cwe_id if source_identifier: params["sourceIdentifier"] = source_identifier if not cpe_name and virtual_match_string: params["virtualMatchString"] = virtual_match_string if has_cert_alerts: params["hasCertAlerts"] = "" if has_cert_notes: params["hasCertNotes"] = "" if has_kev: params["hasKev"] = "" if has_oval: params["hasOval"] = "" results_per_page = min( results_per_page or MAX_CVES_PER_PAGE, request_results or MAX_CVES_PER_PAGE, ) return NVDResults( self, params, _result_iterator, request_results=request_results, results_per_page=results_per_page, start_index=start_index, ) async def cve(self, cve_id: str) -> CVE: """ Returns a single CVE matching the CVE ID. Vulnerabilities not yet published in the NVD are not available. Args: cve_id: Common Vulnerabilities and Exposures identifier Returns: A CVE matching the CVE ID Raises: PontosError: If CVE ID is empty or if no CVE with the CVE ID is found. Example: .. 
code-block:: python from pontos.nvd.cve import CVEApi async with CVEApi() as api: cve = await api.cve("CVE-2022-45536") print(cve) """ if not cve_id: raise PontosError("Missing CVE ID.") response = await self._get(params={"cveId": cve_id}) response.raise_for_status() data = response.json(object_hook=convert_camel_case) vulnerabilities = data["vulnerabilities"] if not vulnerabilities: raise PontosError(f"No CVE with CVE ID '{cve_id}' found.") vulnerability = vulnerabilities[0] return CVE.from_dict(vulnerability["cve"]) async def __aenter__(self) -> "CVEApi": await super().__aenter__() return self async def __aexit__( self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType], ) -> Optional[bool]: return await super().__aexit__( # type: ignore exc_type, exc_value, traceback ) pontos-25.3.2/pontos/nvd/cve_changes/000077500000000000000000000000001476255566300175265ustar00rootroot00000000000000pontos-25.3.2/pontos/nvd/cve_changes/__init__.py000066400000000000000000000013751476255566300216450ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later import asyncio from argparse import Namespace from pontos.nvd.cve_changes.api import CVEChangesApi from ._parser import parse_args __all__ = ("CVEChangesApi",) async def query_changes(args: Namespace) -> None: async with CVEChangesApi(token=args.token) as api: async for cve in api.changes( cve_id=args.cve_id, event_name=args.event_name, request_results=args.number, start_index=args.start, ): print(cve) def main() -> None: try: args = parse_args() asyncio.run(query_changes(args)) except KeyboardInterrupt: pass if __name__ == "__main__": main() pontos-25.3.2/pontos/nvd/cve_changes/_parser.py000066400000000000000000000015311476255566300215330ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from argparse import ArgumentParser, Namespace from 
typing import Optional, Sequence import shtab def parse_args(args: Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument("--cve-id", help="Get changes for a specific CVE") parser.add_argument( "--event-name", help="Get all CVE associated with a specific event name" ) parser.add_argument( "--number", "-n", metavar="N", help="Request only N CPEs", type=int ) parser.add_argument( "--start", "-s", help="Index of the first CPE to request.", type=int, ) return parser.parse_args(args) pontos-25.3.2/pontos/nvd/cve_changes/api.py000066400000000000000000000132751476255566300206610ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from datetime import datetime, timedelta from typing import Any, Iterator, Optional, Union from httpx import Timeout from pontos.errors import PontosError from pontos.nvd.api import ( DEFAULT_TIMEOUT_CONFIG, JSON, NVDApi, NVDResults, Params, format_date, now, ) from pontos.nvd.models.cve_change import CVEChange, EventName __all__ = ("CVEChangesApi",) DEFAULT_NIST_NVD_CVE_HISTORY_URL = ( "https://services.nvd.nist.gov/rest/json/cvehistory/2.0" ) MAX_CVE_CHANGES_PER_PAGE = 5000 def _result_iterator(data: JSON) -> Iterator[CVEChange]: results: list[dict[str, Any]] = data.get("cve_changes", []) # type: ignore return (CVEChange.from_dict(result["change"]) for result in results) class CVEChangesApi(NVDApi): """ API for querying the NIST NVD CVE Change History information. Should be used as an async context manager. Example: .. 
code-block:: python from pontos.nvd.cve_changes import CVEChangesApi async with CVEChangesApi() as api: async for cve_change in api.changes(event_name=EventName.INITIAL_ANALYSIS): print(cve_change) """ def __init__( self, *, token: Optional[str] = None, timeout: Optional[Timeout] = DEFAULT_TIMEOUT_CONFIG, rate_limit: bool = True, request_attempts: int = 1, ) -> None: """ Create a new instance of the CVE Change History API. Args: token: The API key to use. Using an API key allows to run more requests at the same time. timeout: Timeout settings for the HTTP requests rate_limit: Set to False to ignore rate limits. The public rate limit (without an API key) is 5 requests in a rolling 30 second window. The rate limit with an API key is 50 requests in a rolling 30 second window. See https://nvd.nist.gov/developers/start-here#divRateLimits Default: True. request_attempts: The number of attempts per HTTP request. Defaults to 1. """ super().__init__( DEFAULT_NIST_NVD_CVE_HISTORY_URL, token=token, timeout=timeout, rate_limit=rate_limit, request_attempts=request_attempts, ) def changes( self, *, change_start_date: Optional[datetime] = None, change_end_date: Optional[datetime] = None, cve_id: Optional[str] = None, event_name: Optional[Union[EventName, str]] = None, request_results: Optional[int] = None, start_index: int = 0, results_per_page: Optional[int] = None, ) -> NVDResults[CVEChange]: """ Get all CVEs for the provided arguments https://nvd.nist.gov/developers/vulnerabilities#divGetCves Args: change_start_date: Return all CVE changes after this date. change_end_date: Return all CVE changes before this date. cve_id: Return all CVE changes for this Common Vulnerabilities and Exposures identifier. event_name: Return all CVE changes with this event name. request_results: Number of CVEs changes to download. Set to None (default) to download all available CPEs. start_index: Index of the first CVE change to be returned. 
Useful only for paginated requests that should not start at the first page. results_per_page: Number of results in a single requests. Mostly useful for paginated requests. Returns: A NVDResponse for CVE changes Example: .. code-block:: python from pontos.nvd.cve_changes import CVEChangesApi async with CVEChangesApi() as api: async for cve_change in api.changes(event_name=EventName.INITIAL_ANALYSIS): print(cve_change) json = api.changes(event_name=EventName.INITIAL_ANALYSIS).json() async for changes in api.changes( event_name=EventName.INITIAL_ANALYSIS, ).chunks(): for cve_change in changes: print(cve_change) """ if change_start_date and not change_end_date: change_end_date = min( now(), change_start_date + timedelta(days=120) ) elif change_end_date and not change_start_date: change_start_date = change_end_date - timedelta(days=120) params: Params = {} if change_start_date and change_end_date: if change_end_date - change_start_date > timedelta(days=120): raise PontosError( "change_start_date and change_end_date must not be more than 120 days apart" ) params["changeStartDate"] = format_date(change_start_date) params["changeEndDate"] = format_date(change_end_date) if cve_id: params["cveId"] = cve_id if event_name: params["eventName"] = event_name results_per_page = min( results_per_page or MAX_CVE_CHANGES_PER_PAGE, request_results or MAX_CVE_CHANGES_PER_PAGE, ) return NVDResults( self, params, _result_iterator, request_results=request_results, results_per_page=results_per_page, start_index=start_index, ) async def __aenter__(self) -> "CVEChangesApi": await super().__aenter__() return self pontos-25.3.2/pontos/nvd/models/000077500000000000000000000000001476255566300165445ustar00rootroot00000000000000pontos-25.3.2/pontos/nvd/models/__init__.py000066400000000000000000000001411476255566300206510ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # 
pontos-25.3.2/pontos/nvd/models/cpe.py000066400000000000000000000045021476255566300176660ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass, field from datetime import datetime from typing import List, Optional from uuid import UUID from pontos.models import Model, StrEnum __all__ = ( "DeprecatedBy", "ReferenceType", "Reference", "Title", "CPE", ) @dataclass class Title(Model): """ A CPE title Attributes: title: The actual title lang: Language of the title """ title: str lang: str class ReferenceType(StrEnum): """ A CPE reference type Attributes: ADVISORY: The reference is an advisory CHANGELOG: The reference is a changelog PRODUCT: The reference is a product PROJECT: The reference is a project VENDOR: The reference is a vendor VERSION: The reference is version """ ADVISORY = "Advisory" CHANGELOG = "Change Log" PRODUCT = "Product" PROJECT = "Project" VENDOR = "Vendor" VERSION = "Version" @dataclass class Reference(Model): """ A CPE reference Attributes: ref: The content of the reference type: The type of the reference """ ref: str type: Optional[ReferenceType] = None @dataclass class DeprecatedBy(Model): """ A CPE is deprecated by another CPE Attributes: cpe_name: Name of the CPE that deprecates this CPE cpe_name_id: ID of the CPE that deprecates this CPE """ cpe_name: Optional[str] = None cpe_name_id: Optional[UUID] = None @dataclass class CPE(Model): """ Represents a CPE Attributes: cpe_name: The name of the CPE cpe_name_id: UUID of the CPE deprecated: True if the CPE is deprecated last_modified: Last modification date of the CPE created: Creation date of the CPE titles: List of titles for the CPE refs: References to additional data deprecated_by: Additional information about possible deprecation by another CPE """ cpe_name: str cpe_name_id: UUID deprecated: bool last_modified: datetime created: datetime titles: List[Title] = field(default_factory=list) 
refs: List[Reference] = field(default_factory=list) deprecated_by: List[DeprecatedBy] = field(default_factory=list) pontos-25.3.2/pontos/nvd/models/cpe_match_string.py000066400000000000000000000055051476255566300224340ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass, field from datetime import datetime from typing import Any, List, Optional from uuid import UUID from pontos.models import Model @dataclass class CPEMatch(Model): """ Represents a single CPE match. Attributes: cpe_name: Name of the matching CPE cpe_name_id: Name ID of the matching CPE """ cpe_name: str cpe_name_id: UUID @dataclass class CPEMatchString(Model): """ Represents a CPE match string, matching criteria to one or more CPEs Attributes: match_criteria_id: The identifier of the CPE match criteria: The CPE formatted match criteria version_start_including: Optional start of the matching version range, including the given version version_start_excluding: Optional start of the matching version range, excluding the given version version_end_including: Optional end of the matching version range, including the given version version_end_excluding: Optional end of the matching version range, excluding the given version status: Status of the CPE match cpe_last_modified: Optional date the CPEs list of the match was last modified created: Creation date of the CPE last_modified: Last modification date of the CPE matches: List of CPEs matching the criteria string and the optional range limits """ match_criteria_id: UUID criteria: str status: str created: datetime last_modified: datetime cpe_last_modified: Optional[datetime] = None matches: List[CPEMatch] = field(default_factory=list) version_start_including: Optional[str] = None version_start_excluding: Optional[str] = None version_end_including: Optional[str] = None version_end_excluding: Optional[str] = None @classmethod def from_dict_with_cache( cls, 
data: dict[str, Any], cpe_match_cache: dict[str, CPEMatch], ): """ Create a CPEMatchString model from a dict, reusing duplicate CPEMatch objects to reduce memory usage if a cache dict is given. Args: data: The JSON dict to generate the model from cpe_match_cache: A dictionary to store CPE matches or None to not cache and reused CPE matches """ new_match_string = cls.from_dict(data) for i, match in enumerate(new_match_string.matches): cached_match: Optional[CPEMatch] = cpe_match_cache.get( match.cpe_name_id ) if cached_match and cached_match.cpe_name == match.cpe_name: new_match_string.matches[i] = cached_match else: cpe_match_cache[match.cpe_name_id] = match return new_match_string pontos-25.3.2/pontos/nvd/models/cve.py000066400000000000000000000152351476255566300177010ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass, field from datetime import date, datetime from typing import List, Optional from pontos.models import Model, StrEnum from pontos.nvd.models.cvss_v2 import CVSSData as CVSSv2Data from pontos.nvd.models.cvss_v3 import CVSSData as CVSSv3Data __all__ = ( "Configuration", "CPEMatch", "CVSSType", "CVSSv2Metric", "CVSSv3Metric", "Description", "Metrics", "Node", "Operator", "Reference", "VendorComment", "Weakness", "CVE", ) class CVSSType(StrEnum): """ The CVSS Type: primary or secondary Attributes: PRIMARY: A primary CVSS SECONDARY: A secondary CVSS """ PRIMARY = "Primary" SECONDARY = "Secondary" @dataclass class Description(Model): """ A description in a specific language Attributes: lang: Language of the description value: The actual description """ lang: str value: str @dataclass class CVSSv2Metric(Model): """ A CVSSv3 metric Attributes: source: The source of the CVSS type: The CVSS type cvss_data: The actual CVSSv2 data base_severity: exploitability_score: impact_score: ac_insuf_info: obtain_all_privilege: obtain_user_privilege: 
obtain_other_privilege: user_interaction_required: """ source: str type: CVSSType cvss_data: CVSSv2Data base_severity: Optional[str] = None exploitability_score: Optional[float] = None impact_score: Optional[float] = None ac_insuf_info: Optional[bool] = None obtain_all_privilege: Optional[bool] = None obtain_user_privilege: Optional[bool] = None obtain_other_privilege: Optional[bool] = None user_interaction_required: Optional[bool] = None @dataclass class CVSSv3Metric(Model): """ A CVSSv3 metric Attributes: source: The source of the CVSS type: The CVSS type cvss_data: The actual CVSSv3 data exploitability_score: impact_score: """ source: str type: CVSSType cvss_data: CVSSv3Data exploitability_score: Optional[float] = None impact_score: Optional[float] = None @dataclass class Metrics(Model): """ CVE metrics Attributes: cvss_metric_v31: A list of CVSSv3.1 metrics cvss_metric_v30: A list of CVSSv3.0 metrics cvss_metric_v2: A list of CVSSv2 metrics """ cvss_metric_v31: List[CVSSv3Metric] = field(default_factory=list) cvss_metric_v30: List[CVSSv3Metric] = field(default_factory=list) cvss_metric_v2: List[CVSSv2Metric] = field(default_factory=list) @dataclass class Reference(Model): """ A CVE reference Attributes: url: URL to the reference source: Source of the reference tags: List of tags for the reference """ url: str source: Optional[str] = None tags: List[str] = field(default_factory=list) @dataclass class Weakness(Model): """ Attributes: source: type: description: """ source: str type: str description: List[Description] = field(default_factory=list) @dataclass class VendorComment(Model): """ A vendor comment Attributes: organization: Name of the vendor comment: The actual comment of the vendor last_modified: Last modification date of the comment """ organization: str comment: str last_modified: datetime class Operator(StrEnum): """ An operator: AND or OR Attributes: AND: A and operator OR: A or operator """ AND = "AND" OR = "OR" @dataclass class CPEMatch(Model): """ 
A CPE match referencing a vulnerable product with a version range Attributes: vulnerable: criteria: match_criteria_id: version_start_excluding: Matches the CPE excluding the specified version version_start_including: Matches the CPE including the specified version version_end_excluding: Matches the CPE excluding up to the specified version version_end_including: Matches the CPE including up to the specified version """ vulnerable: bool criteria: str match_criteria_id: str version_start_excluding: Optional[str] = None version_start_including: Optional[str] = None version_end_excluding: Optional[str] = None version_end_including: Optional[str] = None @dataclass class Node(Model): """ A CVE configuration node Attributes: operator: Operator (and/or) for this node cpe_match: The CPE match for the node. Despite a cpe match is required int NISTs API spec the data seems to contain nodes without matches. negate: """ operator: Operator cpe_match: Optional[List[CPEMatch]] = None negate: Optional[bool] = None @dataclass class Configuration(Model): """ A CVE configuration Attributes: nodes: operator: negate: """ nodes: List[Node] operator: Optional[Operator] = None negate: Optional[bool] = None @dataclass class CVE(Model): """ A model representing a CVE Attributes: id: ID of the CVE source_identifier: Identifier for the source of the CVE published: Date of publishing last_modified: Last modification date vuln_status: Current vulnerability status descriptions: List of additional descriptions references: List of additional references (URLs) weaknesses: List of weaknesses configurations: List of configurations vendor_comments: List of vendor comments metrics: List of CVSS metrics for this CVE evaluator_comment: evaluator_solution: evaluator_impact: cisa_exploit_add: cisa_action_due: cisa_required_action: cisa_vulnerability_name: """ id: str source_identifier: str published: datetime last_modified: datetime vuln_status: str descriptions: List[Description] references: 
List[Reference] weaknesses: List[Weakness] = field(default_factory=list) configurations: List[Configuration] = field(default_factory=list) vendor_comments: List[VendorComment] = field(default_factory=list) metrics: Optional[Metrics] = None evaluator_comment: Optional[str] = None evaluator_solution: Optional[str] = None evaluator_impact: Optional[str] = None cisa_exploit_add: Optional[date] = None cisa_action_due: Optional[date] = None cisa_required_action: Optional[str] = None cisa_vulnerability_name: Optional[str] = None pontos-25.3.2/pontos/nvd/models/cve_change.py000066400000000000000000000022531476255566300212020ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from dataclasses import dataclass from datetime import datetime from typing import Optional from uuid import UUID from pontos.models import Model, StrEnum class EventName(StrEnum): NEW_CVE_RECEIVED = "New CVE Received" INITIAL_ANALYSIS = "Initial Analysis" REANALYSIS = "Reanalysis" CVE_MODIFIED = "CVE Modified" MODIFIED_ANALYSIS = "Modified Analysis" CVE_TRANSLATED = "CVE Translated" VENDOR_COMMENT = "Vendor Comment" CVE_SOURCE_UPDATE = "CVE Source Update" CPE_DEPRECATION_REMAP = "CPE Deprecation Remap" CWE_REMAP = "CWE Remap" CVE_REJECTED = "CVE Rejected" CVE_UNREJECTED = "CVE Unrejected" CVE_CISA_KEV_UPDATE = "CVE CISA KEV Update" REFERENCE_TAG_UPDATE = "Reference Tag Update" @dataclass class Detail: type: str action: Optional[str] = None old_value: Optional[str] = None new_value: Optional[str] = None @dataclass class CVEChange(Model): cve_id: str event_name: EventName cve_change_id: UUID source_identifier: str created: Optional[datetime] = None details: Optional[list[Detail]] = None pontos-25.3.2/pontos/nvd/models/cvss_v2.py000066400000000000000000000051371476255566300205110ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass 
from typing import Optional

from pontos.models import Model, StrEnum


class Severity(StrEnum):
    """CVSS v2 qualitative severity rating"""

    LOW = "LOW"
    MEDIUM = "MEDIUM"
    HIGH = "HIGH"


class AccessVector(StrEnum):
    """CVSS v2 Access Vector (AV) metric values"""

    NETWORK = "NETWORK"
    ADJACENT_NETWORK = "ADJACENT_NETWORK"
    LOCAL = "LOCAL"


class AccessComplexity(StrEnum):
    """CVSS v2 Access Complexity (AC) metric values"""

    HIGH = "HIGH"
    MEDIUM = "MEDIUM"
    LOW = "LOW"


class Authentication(StrEnum):
    """CVSS v2 Authentication (Au) metric values"""

    MULTIPLE = "MULTIPLE"
    SINGLE = "SINGLE"
    NONE = "NONE"


class Impact(StrEnum):
    """CVSS v2 impact values, shared by the C/I/A impact metrics"""

    NONE = "NONE"
    PARTIAL = "PARTIAL"
    COMPLETE = "COMPLETE"


class Exploitability(StrEnum):
    """CVSS v2 temporal Exploitability (E) metric values"""

    UNPROVEN = "UNPROVEN"
    PROOF_OF_CONCEPT = "PROOF_OF_CONCEPT"
    FUNCTIONAL = "FUNCTIONAL"
    HIGH = "HIGH"
    NOT_DEFINED = "NOT_DEFINED"


class RemediationLevel(StrEnum):
    """CVSS v2 temporal Remediation Level (RL) metric values"""

    OFFICIAL_FIX = "OFFICIAL_FIX"
    TEMPORARY_FIX = "TEMPORARY_FIX"
    WORKAROUND = "WORKAROUND"
    UNAVAILABLE = "UNAVAILABLE"
    NOT_DEFINED = "NOT_DEFINED"


class ReportConfidence(StrEnum):
    """CVSS v2 temporal Report Confidence (RC) metric values"""

    UNCONFIRMED = "UNCONFIRMED"
    UNCORROBORATED = "UNCORROBORATED"
    CONFIRMED = "CONFIRMED"
    NOT_DEFINED = "NOT_DEFINED"


class CollateralDamagePotential(StrEnum):
    """CVSS v2 environmental Collateral Damage Potential (CDP) values"""

    NONE = "NONE"
    LOW = "LOW"
    LOW_MEDIUM = "LOW_MEDIUM"
    MEDIUM_HIGH = "MEDIUM_HIGH"
    HIGH = "HIGH"
    NOT_DEFINED = "NOT_DEFINED"


class TargetDistribution(StrEnum):
    """CVSS v2 environmental Target Distribution (TD) metric values"""

    NONE = "NONE"
    LOW = "LOW"
    MEDIUM = "MEDIUM"
    HIGH = "HIGH"
    NOT_DEFINED = "NOT_DEFINED"


class Requirement(StrEnum):
    """CVSS v2 environmental security requirement (CR/IR/AR) values"""

    LOW = "LOW"
    MEDIUM = "MEDIUM"
    HIGH = "HIGH"
    NOT_DEFINED = "NOT_DEFINED"


@dataclass
class CVSSData(Model):
    """
    CVSS v2 metric data for a CVE.

    Only version, vector_string and base_score are mandatory; all
    individual base, temporal and environmental metrics are optional.

    Attributes:
        version: CVSS version of this data set
        vector_string: The complete CVSS vector string
        base_score: The numeric base score
    """

    version: str
    vector_string: str
    base_score: float
    # base metrics
    access_vector: Optional[AccessVector] = None
    access_complexity: Optional[AccessComplexity] = None
    authentication: Optional[Authentication] = None
    confidentiality_impact: Optional[Impact] = None
    integrity_impact: Optional[Impact] = None
    availability_impact: Optional[Impact] = None
    # temporal metrics
    exploitability: Optional[Exploitability] = None
    remediation_level: Optional[RemediationLevel] = None
    report_confidence: Optional[ReportConfidence] = None
    temporal_score: Optional[float] = None
    # environmental metrics
    collateral_damage_potential: Optional[CollateralDamagePotential] = None
target_distribution: Optional[TargetDistribution] = None confidentiality_requirement: Optional[Requirement] = None integrity_requirement: Optional[Requirement] = None availability_requirement: Optional[Requirement] = None environmental_score: Optional[float] = None pontos-25.3.2/pontos/nvd/models/cvss_v3.py000066400000000000000000000074621476255566300205150ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from dataclasses import dataclass from typing import Optional from pontos.models import Model, StrEnum class Severity(StrEnum): NONE = "NONE" LOW = "LOW" MEDIUM = "MEDIUM" HIGH = "HIGH" CRITICAL = "CRITICAL" class AttackVector(StrEnum): NETWORK = "NETWORK" ADJACENT_NETWORK = "ADJACENT_NETWORK" LOCAL = "LOCAL" PHYSICAL = "PHYSICAL" class ModifiedAttackVector(StrEnum): NETWORK = "NETWORK" ADJACENT_NETWORK = "ADJACENT_NETWORK" LOCAL = "LOCAL" PHYSICAL = "PHYSICAL" NOT_DEFINED = "NOT_DEFINED" class AttackComplexity(StrEnum): HIGH = "HIGH" LOW = "LOW" class ModifiedAttackComplexity(StrEnum): HIGH = "HIGH" LOW = "LOW" NOT_DEFINED = "NOT_DEFINED" class PrivilegesRequired(StrEnum): HIGH = "HIGH" LOW = "LOW" NONE = "NONE" class ModifiedPrivilegesRequired(StrEnum): HIGH = "HIGH" LOW = "LOW" NONE = "NONE" NOT_DEFINED = "NOT_DEFINED" class UserInteraction(StrEnum): NONE = "NONE" REQUIRED = "REQUIRED" class ModifiedUserInteraction(StrEnum): NONE = "NONE" REQUIRED = "REQUIRED" NOT_DEFINED = "NOT_DEFINED" class Scope(StrEnum): UNCHANGED = "UNCHANGED" CHANGED = "CHANGED" class ModifiedScope(StrEnum): UNCHANGED = "UNCHANGED" CHANGED = "CHANGED" NOT_DEFINED = "NOT_DEFINED" class Impact(StrEnum): NONE = "NONE" LOW = "LOW" HIGH = "HIGH" class ModifiedImpact(StrEnum): NONE = "NONE" LOW = "LOW" HIGH = "HIGH" NOT_DEFINED = "NOT_DEFINED" class ExploitCodeMaturity(StrEnum): UNPROVEN = "UNPROVEN" PROOF_OF_CONCEPT = "PROOF_OF_CONCEPT" FUNCTIONAL = "FUNCTIONAL" HIGH = "HIGH" NOT_DEFINED = "NOT_DEFINED" class 
RemediationLevel(StrEnum): OFFICIAL_FIX = "OFFICIAL_FIX" TEMPORARY_FIX = "TEMPORARY_FIX" WORKAROUND = "WORKAROUND" UNAVAILABLE = "UNAVAILABLE" NOT_DEFINED = "NOT_DEFINED" class Confidence(StrEnum): UNKNOWN = "UNKNOWN" REASONABLE = "REASONABLE" CONFIRMED = "CONFIRMED" NOT_DEFINED = "NOT_DEFINED" class Requirement(StrEnum): LOW = "LOW" MEDIUM = "MEDIUM" HIGH = "HIGH" NOT_DEFINED = "NOT_DEFINED" @dataclass class CVSSData(Model): version: str vector_string: str base_score: float base_severity: Severity attack_vector: Optional[AttackVector] = None attack_complexity: Optional[AttackComplexity] = None privileges_required: Optional[PrivilegesRequired] = None user_interaction: Optional[UserInteraction] = None scope: Optional[Scope] = None confidentiality_impact: Optional[Impact] = None integrity_impact: Optional[Impact] = None availability_impact: Optional[Impact] = None exploit_code_maturity: Optional[ExploitCodeMaturity] = None remediation_level: Optional[RemediationLevel] = None report_confidence: Optional[Confidence] = None temporal_score: Optional[float] = None temporal_severity: Optional[Severity] = None confidentiality_requirement: Optional[Requirement] = None integrity_requirement: Optional[Requirement] = None availability_requirement: Optional[Requirement] = None modified_attack_vector: Optional[ModifiedAttackVector] = None modified_attack_complexity: Optional[ModifiedAttackComplexity] = None modified_privileges_required: Optional[ModifiedPrivilegesRequired] = None modified_user_interaction: Optional[ModifiedUserInteraction] = None modified_scope: Optional[ModifiedScope] = None modified_confidentiality_impact: Optional[ModifiedImpact] = None modified_integrity_impact: Optional[ModifiedImpact] = None modified_availability_impact: Optional[ModifiedImpact] = None environmental_score: Optional[float] = None environmental_severity: Optional[Severity] = None 
pontos-25.3.2/pontos/nvd/models/source.py000066400000000000000000000013201476255566300204120ustar00rootroot00000000000000
# SPDX-FileCopyrightText: 2022-2025 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

from dataclasses import dataclass, field
from datetime import datetime
from typing import List, Optional

from pontos.models import Model


@dataclass
class AcceptanceLevel(Model):
    """
    Acceptance level of an NVD source.

    Attributes:
        description: Description of the acceptance level
        last_modified: Date of the last modification
    """

    description: str
    last_modified: datetime


@dataclass
class Source(Model):
    """
    A model representing an NVD data source record.

    Attributes:
        last_modified: Date of the last modification of the record
        created: Creation date of the record
        name: Name of the source
        source_identifiers: Identifiers of the source (for example email
            addresses such as "cve@mitre.org")
        contact_email: Contact email address of the source
        v2_acceptance_level: Acceptance level for CVSS v2 submissions
        v3_acceptance_level: Acceptance level for CVSS v3 submissions
        cwe_acceptance_level: Acceptance level for CWE submissions
    """

    last_modified: datetime
    created: datetime
    name: Optional[str] = None
    source_identifiers: List[str] = field(default_factory=list)
    contact_email: Optional[str] = None
    v2_acceptance_level: Optional[AcceptanceLevel] = None
    v3_acceptance_level: Optional[AcceptanceLevel] = None
    cwe_acceptance_level: Optional[AcceptanceLevel] = None
pontos-25.3.2/pontos/nvd/source/000077500000000000000000000000001476255566300165615ustar00rootroot00000000000000
pontos-25.3.2/pontos/nvd/source/__init__.py000066400000000000000000000013401476255566300206700ustar00rootroot00000000000000
# SPDX-FileCopyrightText: 2025 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

import asyncio
from argparse import Namespace

from pontos.nvd.source.api import SourceApi

from ._parser import parse_args

__all__ = ("SourceApi",)


async def query_changes(args: Namespace) -> None:
    """Query NVD sources per the parsed CLI arguments and print each one."""
    # NOTE(review): despite its name this coroutine queries and prints
    # *sources*, not CVE changes -- the name looks copied from a sibling
    # module; confirm before renaming.
    async with SourceApi(token=args.token) as api:
        async for source in api.sources(
            source_identifier=args.source_identifier,
            request_results=args.number,
            start_index=args.start,
        ):
            print(source)


def main() -> None:
    """CLI entry point: parse the arguments and run the async source query."""
    try:
        args = parse_args()
        asyncio.run(query_changes(args))
    except KeyboardInterrupt:
        # allow interrupting with Ctrl-C without printing a traceback
        pass


if __name__ == "__main__":
    main()
pontos-25.3.2/pontos/nvd/source/_parser.py000066400000000000000000000014631476255566300205720ustar00rootroot00000000000000
# SPDX-FileCopyrightText: 2025 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

from argparse import ArgumentParser, Namespace
from typing import Optional, Sequence

import shtab


def parse_args(args:
Optional[Sequence[str]] = None) -> Namespace: parser = ArgumentParser() shtab.add_argument_to(parser) parser.add_argument("--token", help="API key to use for querying.") parser.add_argument( "--source-identifier", help="Get sources record for this source identifier", ) parser.add_argument( "--number", "-n", metavar="N", help="Request only N sources", type=int ) parser.add_argument( "--start", "-s", help="Index of the first source to request.", type=int, default=0, ) return parser.parse_args(args) pontos-25.3.2/pontos/nvd/source/api.py000066400000000000000000000113711476255566300177070ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2025 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from datetime import datetime from typing import ( Iterable, Iterator, Optional, ) from httpx import Timeout from pontos.nvd.api import ( DEFAULT_TIMEOUT_CONFIG, JSON, NVDApi, NVDResults, Params, format_date, now, ) from pontos.nvd.models.source import Source __all__ = ("SourceApi",) DEFAULT_NIST_NVD_SOURCE_URL = ( "https://services.nvd.nist.gov/rest/json/source/2.0" ) MAX_SOURCES_PER_PAGE = 1000 def _result_iterator(data: JSON) -> Iterator[Source]: sources: Iterable = data.get("sources", []) # type: ignore return (Source.from_dict(source) for source in sources) class SourceApi(NVDApi): """ API for querying the NIST NVD source API. Should be used as an async context manager. Example: .. code-block:: python from pontos.nvd.source import SourceApi async with SourceApi() as api: async for source in api.sources(): print(source) """ def __init__( self, *, token: Optional[str] = None, timeout: Optional[Timeout] = DEFAULT_TIMEOUT_CONFIG, rate_limit: bool = True, request_attempts: int = 1, ) -> None: """ Create a new instance of the source API. Args: token: The API key to use. Using an API key allows to run more requests at the same time. timeout: Timeout settings for the HTTP requests rate_limit: Set to False to ignore rate limits. 
The public rate limit (without an API key) is 5 requests in a rolling 30 second window. The rate limit with an API key is 50 requests in a rolling 30 second window. See https://nvd.nist.gov/developers/start-here#divRateLimits Default: True. request_attempts: The number of attempts per HTTP request. Defaults to 1. """ super().__init__( DEFAULT_NIST_NVD_SOURCE_URL, token=token, timeout=timeout, rate_limit=rate_limit, request_attempts=request_attempts, ) def sources( self, *, last_modified_start_date: Optional[datetime] = None, last_modified_end_date: Optional[datetime] = None, source_identifier: Optional[str] = None, request_results: Optional[int] = None, start_index: int = 0, results_per_page: Optional[int] = None, ) -> NVDResults[Source]: """ Get all sources for the provided arguments https://nvd.nist.gov/developers/data-sources#divGetSource Args: last_modified_start_date: Return all sources modified after this date. last_modified_end_date: Return all sources modified before this date. If last_modified_start_date is set but no last_modified_end_date is passed it is set to now. source_identifier: Return all source records where the source identifier matches. request_results: Number of sources to download. Set to None (default) to download all available CPEs. start_index: Index of the first source to be returned. Useful only for paginated requests that should not start at the first page. results_per_page: Number of results in a single requests. Mostly useful for paginated requests. Returns: A NVDResponse for sources Examples: .. 
code-block:: python from pontos.nvd.source import SourceApi async with SourceApi() as api: async for source in api.sources(source_identifier="cve@mitre.org"): print(source) """ params: Params = {} if last_modified_start_date: params["lastModStartDate"] = format_date(last_modified_start_date) if not last_modified_end_date: params["lastModEndDate"] = format_date(now()) if last_modified_end_date: params["lastModEndDate"] = format_date(last_modified_end_date) if source_identifier: params["sourceIdentifier"] = source_identifier results_per_page = min( results_per_page or MAX_SOURCES_PER_PAGE, request_results or MAX_SOURCES_PER_PAGE, ) return NVDResults( self, params, _result_iterator, request_results=request_results, results_per_page=results_per_page, start_index=start_index, ) async def __aenter__(self) -> "SourceApi": await super().__aenter__() return self pontos-25.3.2/pontos/pontos.py000066400000000000000000000055411476255566300163730ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import sys from pontos.terminal import RichTerminal from pontos.version import __version__ def main() -> None: term = RichTerminal() if len(sys.argv) > 1 and sys.argv[1] == "--version": term.print(f"pontos version {__version__}") return term.print() term.bold_info("pontos - Greenbone Python Utilities and Tools") term.print() term.print("The following commands are currently available:") with term.indent(): term.bold_info( "pontos-release - Release handling " "utility for C and Python Projects" ) term.print("usage:") with term.indent(): term.print("pontos-release {prepare,release,sign} -h") term.bold_info( "pontos-version - Version handling utility " "for C, Go and Python Projects" ) term.print("usage:") with term.indent(): term.print("pontos-version {verify,show,update} -h") term.bold_info( "pontos-update-header - Handling Copyright header " "for various file types and licenses" ) term.print("usage:") with 
term.indent(): term.print("pontos-update-header -h") term.bold_info( "pontos-changelog - Parse conventional commits in the " "current branch, creating CHANGELOG.md file" ) term.print("usage:") with term.indent(): term.print("pontos-changelog -h") term.bold_info( "pontos-github - Handling GitHub operations, like " "Pull Requests (beta)" ) term.print("usage:") with term.indent(): term.print("pontos-github {pr} -h") term.bold_info( "pontos-github-script - CLI for running scripts on the GitHub API" ) term.print("usage:") with term.indent(): term.print("pontos-github-script {script} -h") term.print() term.bold_info("pontos-github-actions - GitHub Actions API CLI") term.print("usage:") with term.indent(): term.print("pontos-github-actions -h") term.print() term.info( "pontos also comes with a Terminal interface " "printing prettier outputs" ) with term.indent(): term.print('Accessible with import "pontos.terminal"') term.info("pontos also comes with git and GitHub APIs") with term.indent(): term.print( 'Accessible with "import pontos.git" ' 'and "import pontos.github"' ) term.print() term.warning( 'Use the listed commands "help" for more information ' "and arguments description." 
) if __name__ == "__main__": main() pontos-25.3.2/pontos/py.typed000066400000000000000000000000001476255566300161570ustar00rootroot00000000000000pontos-25.3.2/pontos/release/000077500000000000000000000000001476255566300161125ustar00rootroot00000000000000pontos-25.3.2/pontos/release/__init__.py000066400000000000000000000010471476255566300202250ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .create import CreateReleaseCommand, CreateReleaseReturnValue from .helper import ReleaseType, find_signing_key, get_git_repository_name from .main import main from .sign import SignatureError, SignCommand, SignReturnValue __all__ = ( "ReleaseType", "get_git_repository_name", "find_signing_key", "CreateReleaseCommand", "CreateReleaseReturnValue", "SignCommand", "SignatureError", "SignReturnValue", "main", ) pontos-25.3.2/pontos/release/_parser.py000066400000000000000000000264761476255566300201360ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import argparse import os from argparse import ( ArgumentParser, ArgumentTypeError, BooleanOptionalAction, Namespace, ) from pathlib import Path from typing import Optional, Sequence, Tuple import shtab from pontos.enum import enum_choice, enum_type, to_choices from pontos.git._git import DEFAULT_TAG_PREFIX from pontos.version.schemes import ( VERSIONING_SCHEMES, PEP440VersioningScheme, VersioningScheme, versioning_scheme_argument_type, ) from .create import create_release from .helper import ReleaseType from .show import OutputFormat, show from .sign import sign DEFAULT_SIGNING_KEY = "0ED1E580" class ReleaseVersionAction(argparse._StoreAction): def __call__(self, parser, namespace, values, option_string=None): setattr(namespace, "release_type", ReleaseType.VERSION) setattr(namespace, self.dest, values) def repository_type(value: str) -> str: """ Validates the repository format of 
owner/name """ splitted = value.split("/") if len(splitted) != 2: raise ArgumentTypeError( f"Invalid repository format {value}. Format must be owner/name." ) return value def add_create_parser( subparsers: argparse._SubParsersAction, ) -> None: create_parser: ArgumentParser = subparsers.add_parser( "create", aliases=["release"], help="Create a new release", description="Create a new release", ) create_parser.set_defaults(func=create_release) create_parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. " "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) create_parser.add_argument( "--release-type", help="Select the release type for calculating the release version. " f"Possible choices are: {to_choices(ReleaseType)}.", type=enum_type(ReleaseType), choices=enum_choice(ReleaseType), ) create_parser.add_argument( "--release-version", help=( "Will release changelog as version. " "Default: lookup version in project definition." ), action=ReleaseVersionAction, ) create_parser.add_argument( "--release-series", help="Create a release for a release series. Setting a release series " "is required if the latest tag version is newer then the to be " 'released version. Examples: "1.2", "2", "22.4"', ) create_parser.add_argument( "--last-release-version", help="Optional last release version. Will be determined if not set.", type=str, ) next_version_group = create_parser.add_mutually_exclusive_group() next_version_group.add_argument( "--next-version", help=( "Sets the next version in project definition " "after the release. 
Default: set to next dev version" ), ) next_version_group.add_argument( "--no-next-version", help="Don't set a next version after the release.", dest="next_version", action="store_false", ) create_parser.add_argument( "--git-remote-name", help="The git remote name to push the commits and tag to", ) create_parser.add_argument( "--git-tag-prefix", default=DEFAULT_TAG_PREFIX, const="", nargs="?", help="Prefix for git tag versions. Default: %(default)s", ) create_parser.add_argument( "--git-signing-key", help="The key to sign the commits and tag for a release", default=os.environ.get("GPG_SIGNING_KEY"), ) create_parser.add_argument( "--repository", help="GitHub repository name (owner/name) where to publish the new " "release. For example octocat/Hello-World", type=repository_type, ) create_parser.add_argument( "--local", action="store_true", help="Only create release changes locally and do not upload them to a " "remote repository. Also do not create a GitHub release.", ) create_parser.add_argument( "--update-project", help="Update version in project files like pyproject.toml. By default " "project files are updated.", action=BooleanOptionalAction, default=True, ) create_parser.add_argument( "--github-pre-release", help="Enforce uploading a release as GitHub pre-release. ", action="store_true", ) changelog_parser = create_parser.add_mutually_exclusive_group() changelog_parser.add_argument( "--conventional-commits-config", dest="cc_config", type=Path, help="Conventional commits config file (toml), including conventions." 
" If not set defaults are used.", ) changelog_parser.add_argument( "--changelog", type=Path, help="Read the release notes from the given file.", ) def add_sign_parser( subparsers: argparse._SubParsersAction, ) -> None: sign_parser: ArgumentParser = subparsers.add_parser( "sign", help="Create signatures for an existing release", description="Create signatures for an existing release", ) sign_parser.set_defaults(func=sign) sign_parser.add_argument( "--signing-key", default=DEFAULT_SIGNING_KEY, help="The key to sign zip, tarballs of a release. Default %(default)s.", ) sign_parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. " "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) sign_parser.add_argument( "--release-version", help="Will release changelog as version. Must be PEP 440 compliant.", ) sign_parser.add_argument( "--release-series", help="Sign release files for a release series. Setting a release " "series is required if the latest tag version is newer then the to be " 'signed version. Examples: "1.2", "2", "22.4"', ) sign_parser.add_argument( "--git-tag-prefix", default="v", const="", nargs="?", help="Prefix for git tag versions. Default: %(default)s", ) sign_parser.add_argument( "--repository", help="GitHub repository name (owner/name) where to download the " "release files from. For example octocat/Hello-World", type=repository_type, ) sign_parser.add_argument( "--passphrase", help=( "Use gpg in a headless mode e.g. for " "the CI and use this passphrase for signing." ), ) sign_parser.add_argument( "--dry-run", action="store_true", help="Do not upload signed files." 
) def add_show_parser( subparsers: argparse._SubParsersAction, ) -> None: show_parser: ArgumentParser = subparsers.add_parser( "show", help="Show release information about the current release version and " "determine the next release version", description="Show release information about the current release " "version and determine the next release version", ) show_parser.set_defaults(func=show) show_parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. " "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) show_parser.add_argument( "--release-type", help="Select the release type for calculating the release version. " f"Possible choices are: {to_choices(ReleaseType)}.", type=enum_type(ReleaseType), choices=enum_choice(ReleaseType), ) show_parser.add_argument( "--release-version", help=( "Will release changelog as version. " "Default: lookup version in project definition." ), action=ReleaseVersionAction, ) show_parser.add_argument( "--release-series", help="Create a release for a release series. Setting a release series " "is required if the latest tag version is newer then the to be " 'released version. Examples: "1.2", "2", "22.4"', ) show_parser.add_argument( "--git-tag-prefix", default="v", const="", nargs="?", help="Prefix for git tag versions. Default: %(default)s", ) show_parser.add_argument( "--output-format", help="Print in the desired output format. 
" f"Possible choices are: {to_choices(OutputFormat)}.", type=enum_type(OutputFormat), choices=enum_choice(OutputFormat), ) def parse_args( args: Optional[Sequence[str]] = None, ) -> Tuple[Optional[str], Optional[str], Namespace]: """ Return user, token, parsed arguments """ parser = ArgumentParser( description="Release handling utility.", prog="pontos-release", ) shtab.add_argument_to(parser) parser.add_argument( "--quiet", "-q", action="store_true", help="Don't print messages to the terminal", ) subparsers = parser.add_subparsers( title="subcommands", description="Valid subcommands", help="Additional help", dest="command", required=True, ) add_create_parser(subparsers) add_sign_parser(subparsers) add_show_parser(subparsers) parsed_args = parser.parse_args(args) scheme: type[VersioningScheme] = getattr( parsed_args, "versioning_scheme", PEP440VersioningScheme ) if parsed_args.func in (create_release, show): # check for release-type if not getattr(parsed_args, "release_type", None): parser.error("--release-type is required.") if ( getattr(parsed_args, "release_version", None) and parsed_args.release_type != ReleaseType.VERSION ): parser.error( "--release-version requires --release-type " f"{ReleaseType.VERSION.value}" ) if parsed_args.release_type == ReleaseType.VERSION and not getattr( parsed_args, "release_version", None ): parser.error( f"--release-type {ReleaseType.VERSION.value} requires to set " "--release-version" ) next_version = getattr(parsed_args, "next_version", None) if next_version: parsed_args.next_version = scheme.parse_version(next_version) release_version = getattr(parsed_args, "release_version", None) if release_version: parsed_args.release_version = scheme.parse_version(release_version) last_release_version = getattr(parsed_args, "last_release_version", None) if last_release_version: parsed_args.last_release_version = scheme.parse_version( last_release_version ) token = os.environ.get("GITHUB_TOKEN") user = os.environ.get("GITHUB_USER") return 
user, token, parsed_args pontos-25.3.2/pontos/release/command.py000066400000000000000000000035641476255566300201120ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later import asyncio from abc import ABC, abstractmethod from typing import Any, Optional, SupportsInt from pontos.terminal import NullTerminal, Terminal class Command(ABC): """Base class for release related command""" def __init__( self, *, terminal: Optional[Terminal] = None, error_terminal: Optional[Terminal] = None, ) -> None: self.terminal = terminal or NullTerminal() self.error_terminal = error_terminal or NullTerminal() def print_error(self, *messages: Any, **kwargs: Any) -> None: """Print an error to the error terminal""" self.error_terminal.error(*messages, **kwargs) def print_warning(self, *messages: Any, **kwargs: Any) -> None: """Print a warning to the error console""" self.error_terminal.warning(*messages, **kwargs) @abstractmethod def run(self, **kwargs: Any) -> SupportsInt: """Run the command""" class AsyncCommand(Command): """Base class for release related commands using asyncio""" def run(self, **kwargs: Any) -> SupportsInt: """ Run the command using asyncio MUST NOT be called when an asyncio event loop is already running. """ return asyncio.run(self.async_run(**kwargs)) @abstractmethod async def async_run(self, **kwargs: Any) -> SupportsInt: """ Run the async command Gets called via run. Alternatively use similar code as the example. Example: .. code-block:: python import asyncio async def main(): cmd = MyAsyncCommand() task = asyncio.create_task(cmd.async_run(arg1, arg2)) ... 
await task asyncio.run(main()) """ pontos-25.3.2/pontos/release/create.py000066400000000000000000000376751476255566300177510ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from argparse import Namespace from dataclasses import dataclass from enum import IntEnum, auto from pathlib import Path from typing import Literal, Optional, SupportsInt, Union import httpx from pontos.changelog.conventional_commits import ChangelogBuilder from pontos.errors import PontosError from pontos.git import Git, ResetMode from pontos.git._git import DEFAULT_TAG_PREFIX from pontos.github.actions.core import ActionIO from pontos.github.api import GitHubAsyncRESTApi from pontos.release.command import AsyncCommand from pontos.terminal import Terminal from pontos.version import Version, VersionError from pontos.version.helper import get_last_release_version from pontos.version.project import Project from pontos.version.schemes import VersioningScheme from .helper import ( ReleaseType, find_signing_key, get_next_release_version, repository_split, ) @dataclass class ReleaseInformation: last_release_version: Optional[Version] release_version: Version git_release_tag: str next_version: Optional[Version] def write_github_output(self): with ActionIO.out() as output: output.write( "last-release-version", self.last_release_version or "" ) output.write("release-version", self.release_version) output.write("git-release-tag", self.git_release_tag) output.write("next-version", self.next_version or "") class CreateReleaseReturnValue(IntEnum): """ Possible return values of ReleaseCommand """ SUCCESS = 0 PROJECT_SETTINGS_NOT_FOUND = auto() TOKEN_MISSING = auto() NO_LAST_RELEASE_VERSION = auto() NO_RELEASE_VERSION = auto() ALREADY_TAKEN = auto() CREATE_RELEASE_ERROR = auto() UPDATE_VERSION_ERROR = auto() UPDATE_VERSION_AFTER_RELEASE_ERROR = auto() INVALID_REPOSITORY = auto() class CreateReleaseCommand(AsyncCommand): """ A CLI 
command for creating a release Args: terminal: A Terminal for output """ def __init__( self, *, terminal: Optional[Terminal] = None, error_terminal: Optional[Terminal] = None, git: Optional[Git] = None, ) -> None: super().__init__(terminal=terminal, error_terminal=error_terminal) self.git = git or Git() def _has_tag(self, git_version: str) -> bool: git_tags = self.git.list_tags() return git_version in git_tags def _create_changelog( self, release_version: Version, last_release_version: Optional[Version], cc_config: Optional[Path], ) -> str: changelog_builder = ChangelogBuilder( repository=self.repository, config=cc_config, git_tag_prefix=self.git_tag_prefix, ) return changelog_builder.create_changelog( last_version=( last_release_version.parsed_version if last_release_version else None ), next_version=release_version, ) async def _create_release( self, release_version: Version, token: str, release_text: str, github_pre_release: bool, ) -> None: github = GitHubAsyncRESTApi(token=token) git_version = f"{self.git_tag_prefix}{release_version}" await github.releases.create( self.repository, git_version, name=f"{self.project_name} {release_version}", body=release_text, prerelease=release_version.is_pre_release or github_pre_release, ) def _get_last_release_version( self, *, versioning_scheme: VersioningScheme, release_type: ReleaseType, release_series: Optional[str] = None, ) -> Optional[Version]: try: return get_last_release_version( git=self.git, parse_version=versioning_scheme.parse_version, git_tag_prefix=self.git_tag_prefix, tag_name=( f"{self.git_tag_prefix}{release_series}.*" if release_series else None ), # include changes from pre-releases in release changelog for # non pre-release changes ignore_pre_releases=release_type not in [ ReleaseType.ALPHA, ReleaseType.BETA, ReleaseType.RELEASE_CANDIDATE, ] # but not when using a release series because then we might not # be able to determine the last release if there are only # pre-releases in the series yet and not 
release_series, ) except PontosError as e: self.print_warning(f"Could not determine last release version. {e}") return None async def async_run( # type: ignore[override] self, *, token: str, repository: str, versioning_scheme: VersioningScheme, release_type: ReleaseType, release_version: Optional[Version] = None, last_release_version: Optional[Version] = None, next_version: Union[Version, Literal[False], None] = None, git_signing_key: Optional[str] = None, git_remote_name: Optional[str] = None, git_tag_prefix: Optional[str] = DEFAULT_TAG_PREFIX, cc_config: Optional[Path] = None, local: bool = False, release_series: Optional[str] = None, update_project: bool = True, github_pre_release: bool = False, changelog: Optional[str] = None, ) -> CreateReleaseReturnValue: """ Create a release Args: token: A token for creating a release on GitHub repository: GitHub repository (owner/name) versioning_scheme: The versioning scheme to use for version parsing and calculation release_type: Type of the release to prepare. Defines the release version. PATCH increments the bugfix version. CALENDAR creates a new CalVer release version. VERSION uses the provided release_version. release_version: Optional release version to use. If not set the to be released version will be determined from the project. last_release_version: Optional last release version to use. If not set the last release version will be determined from the project. next_version: Optional version to set after the release. If set to None the next development version will be set. If set to False the version will not be changed after the release. Default is to update to the next development version. git_signing_key: A GPG key ID to use for creating signatures. git_remote_name: Name of the git remote to use. git_tag_prefix: An optional prefix to use for creating a git tag from the release version. cc_config: A path to a settings file for creating conventional commits. 
local: Only create changes locally and don't push changes to remote repository. Also don't create a GitHub release. release_series: Optional release series to use. For example: "1.2", "2", "23". update_project: Update version in project files. github_pre_release: Enforce uploading a release as a GitHub pre release changelog: An optional changelog. If not set a changelog will be gathered from the git commits since the last release. """ self.git_tag_prefix = git_tag_prefix or "" self.repository = repository try: self.project_name = repository_split(repository)[1] except (ValueError, IndexError) as e: self.print_error( f"Invalid repository format. Should be /. Error was {e}" ) return CreateReleaseReturnValue.INVALID_REPOSITORY self.terminal.info(f"Using versioning scheme {versioning_scheme.name}") if not last_release_version: last_release_version = self._get_last_release_version( versioning_scheme=versioning_scheme, release_type=release_type, release_series=release_series, ) if not last_release_version: if not release_version: self.print_error( "Unable to determine last release version." ) return CreateReleaseReturnValue.NO_LAST_RELEASE_VERSION else: self.terminal.info( f"Creating the initial release {release_version}" ) if last_release_version: self.terminal.info(f"Last release is {last_release_version}") calculator = versioning_scheme.calculator() try: release_version = get_next_release_version( last_release_version=last_release_version, calculator=calculator, release_type=release_type, release_version=release_version, ) except VersionError as e: self.print_error(f"Unable to determine release version. 
{e}") return CreateReleaseReturnValue.NO_RELEASE_VERSION self.terminal.info(f"Preparing the release {release_version}") git_version = f"{self.git_tag_prefix}{release_version}" if self._has_tag(git_version): self.print_error(f"Git tag {git_version} already exists.") return CreateReleaseReturnValue.ALREADY_TAKEN if update_project: try: project = Project(versioning_scheme) except PontosError as e: self.print_error(f"Unable to determine project settings. {e}") return CreateReleaseReturnValue.PROJECT_SETTINGS_NOT_FOUND try: updated = project.update_version(release_version) self.terminal.ok(f"Updated version to {release_version}") for path in updated.changed_files: self.terminal.info(f"Adding changes of {path}") self.git.add(path) except VersionError as e: self.terminal.error( f"Unable to update version to {release_version}. {e}" ) return CreateReleaseReturnValue.UPDATE_VERSION_ERROR if last_release_version: self.terminal.info( f"Creating changelog for {release_version} since " f"{last_release_version}" ) else: self.terminal.info( f"Creating changelog for {release_version} as initial release." 
) if changelog: release_text = changelog else: release_text = self._create_changelog( release_version, last_release_version, cc_config ) commit_msg = f"Automatic release to {release_version}" git_signing_key = ( git_signing_key if git_signing_key is not None else find_signing_key(self.terminal) ) # check if files have been modified and create a commit status = list(self.git.status()) if status: self.terminal.info("Committing changes") self.git.commit( commit_msg, verify=False, gpg_signing_key=git_signing_key ) self.terminal.info(f"Creating tag {git_version}") self.git.tag( git_version, gpg_key_id=git_signing_key, message=commit_msg ) if not local: self.terminal.info("Pushing changes") self.git.push(follow_tags=True, remote=git_remote_name) try: self.terminal.info(f"Creating release for {release_version}") await self._create_release( release_version, token, release_text, github_pre_release, ) self.terminal.ok(f"Created release {release_version}") except httpx.HTTPStatusError as e: self.print_error(str(e)) # revert commit and tag self.git.delete_tag(git_version) self.git.push(git_version, delete=True, remote=git_remote_name) self.git.reset("HEAD^", mode=ResetMode.HARD) self.git.push(force=True, remote=git_remote_name) return CreateReleaseReturnValue.CREATE_RELEASE_ERROR if next_version is None: next_version = calculator.next_dev_version(release_version) if next_version: if update_project: try: updated = project.update_version(next_version) self.terminal.ok( f"Updated version after release to {next_version}" ) except VersionError as e: self.print_error( f"Error while updating version after release. 
{e}" ) return ( CreateReleaseReturnValue.UPDATE_VERSION_AFTER_RELEASE_ERROR ) for f in updated.changed_files: self.terminal.info(f"Adding changes of {f}") self.git.add(f) # check if files have been modified and create a commit status = list(self.git.status()) if status: commit_msg = f"""Automatic adjustments after release [skip ci] * Update to version {next_version} """ self.terminal.info("Committing changes after release") self.git.commit( commit_msg, verify=False, gpg_signing_key=git_signing_key ) if not local: self.terminal.info("Pushing changes") self.git.push(follow_tags=True, remote=git_remote_name) self.release_information = ReleaseInformation( last_release_version=last_release_version, release_version=release_version, git_release_tag=git_version, next_version=next_version or None, ) if ActionIO.has_output(): self.release_information.write_github_output() return CreateReleaseReturnValue.SUCCESS def create_release( args: Namespace, *, token: str, terminal: Terminal, error_terminal: Terminal, **_kwargs, ) -> SupportsInt: if not token: error_terminal.error( "Token is missing. The GitHub token is required to create a " "release." 
) return CreateReleaseReturnValue.TOKEN_MISSING changelog_file: Path = args.changelog if changelog_file and not changelog_file.exists(): error_terminal.error(f"Changelog file {changelog_file} does not exist.") return CreateReleaseReturnValue.CREATE_RELEASE_ERROR return CreateReleaseCommand( terminal=terminal, error_terminal=error_terminal ).run( token=token, repository=args.repository, versioning_scheme=args.versioning_scheme, release_type=args.release_type, release_version=args.release_version, last_release_version=args.last_release_version, next_version=args.next_version, git_remote_name=args.git_remote_name, git_signing_key=args.git_signing_key, git_tag_prefix=args.git_tag_prefix, cc_config=args.cc_config, local=args.local, release_series=args.release_series, update_project=args.update_project, github_pre_release=args.github_pre_release, changelog=changelog_file.read_text() if changelog_file else None, ) pontos-25.3.2/pontos/release/helper.py000066400000000000000000000102351476255566300177440ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import Optional from pontos.enum import StrEnum from pontos.git import Git, GitError from pontos.terminal import Terminal from pontos.version import Version, VersionCalculator, VersionError DEFAULT_TIMEOUT = 1000 DEFAULT_CHUNK_SIZE = 4096 class ReleaseType(StrEnum): """ Type of the release. Used to determine the next release version. 
Attributes: PATCH: A patch version release (1.2.x) CALENDAR: A calendar versioning release (year.month.X) VERSION: The version is explicitly set MAJOR: A major version release (x.0.0) MINOR: A minor version release (1.x.0) ALPHA: A alpha version release BETA: A beta version release RELEASE_CANDIDATE: A release candidate """ PATCH = "patch" CALENDAR = "calendar" VERSION = "version" MAJOR = "major" MINOR = "minor" ALPHA = "alpha" BETA = "beta" RELEASE_CANDIDATE = "release-candidate" def get_git_repository_name( remote: str = "origin", ) -> str: """Get the git repository name Arguments: remote: the remote to look up the name (str) default: origin Returns: The git project name """ ret = Git().remote_url(remote) return ret.rsplit("/", maxsplit=1)[-1].replace(".git", "").strip() def find_signing_key(terminal: Terminal) -> str: """Find the signing key in the config Arguments: terminal: The terminal for console output Returns: git signing key or empty string """ try: return Git().config("user.signingkey").strip() except GitError as e: # The command `git config user.signingkey` returns # return code 1 if no key is set. # So we will return empty string ... if e.returncode == 1: terminal.warning("No signing key found.") return "" def get_next_release_version( *, last_release_version: Optional[Version], calculator: type[VersionCalculator], release_type: ReleaseType, release_version: Optional[Version], ) -> Version: if release_version: if release_type and release_type != ReleaseType.VERSION: raise VersionError( f"Invalid release type {release_type.value} when setting " "release version explicitly. Use release type version instead." ) return release_version else: if not release_type or release_type == ReleaseType.VERSION: raise VersionError( "No release version provided. Either use a different release " "type or provide a release version." ) if not last_release_version: raise VersionError( "No last release version found for release type " f"{release_type.value}. 
Either check the project setup or set a " "release version explicitly." ) if release_type == ReleaseType.CALENDAR: return calculator.next_calendar_version(last_release_version) if release_type == ReleaseType.PATCH: return calculator.next_patch_version(last_release_version) if release_type == ReleaseType.MINOR: return calculator.next_minor_version(last_release_version) if release_type == ReleaseType.MAJOR: return calculator.next_major_version(last_release_version) if release_type == ReleaseType.ALPHA: return calculator.next_alpha_version(last_release_version) if release_type == ReleaseType.BETA: return calculator.next_beta_version(last_release_version) if release_type == ReleaseType.RELEASE_CANDIDATE: return calculator.next_release_candidate_version(last_release_version) raise VersionError(f"Unsupported release type {release_type.value}.") def repository_split(repository: str) -> tuple[str, str]: """ Split a GitHub repository (owner/name) into a space, project tuple """ splitted_repo = repository.split("/") if len(splitted_repo) != 2: raise ValueError( f"Invalid repository {repository}. Format must be " "owner/name." 
) return splitted_repo[0], splitted_repo[1] pontos-25.3.2/pontos/release/main.py000066400000000000000000000031761476255566300174170ustar00rootroot00000000000000# pontos/release/release.py # SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later import logging import subprocess import sys from typing import NoReturn from pontos.git import GitError from pontos.terminal.null import NullTerminal from pontos.terminal.rich import RichTerminal from ._parser import parse_args def main( args=None, ) -> NoReturn: username, token, parsed_args = parse_args(args) logging.basicConfig(format="%(levelname)s - %(name)s - %(message)s") if parsed_args.quiet: term = NullTerminal() error_terminal = NullTerminal() logging.disable() else: term = RichTerminal() # type: ignore[assignment] error_terminal = RichTerminal(file=sys.stderr) # type: ignore[assignment] # noqa: E501 try: retval = parsed_args.func( parsed_args, terminal=term, error_terminal=error_terminal, username=username, token=token, ) sys.exit(int(retval)) except KeyboardInterrupt: sys.exit(1) except GitError as e: error_terminal.error(f'Could not run git command "{e.cmd}".') error = e.stderr if e.stderr else e.stdout error_terminal.print(f"Output was: {error}") sys.exit(1) except subprocess.CalledProcessError as e: if "--passphrase" not in e.cmd: error_terminal.error(f'Could not run command "{e.cmd}".') else: error_terminal.error("Headless signing failed.") error_terminal.print(f"Error was: {e.stderr}") sys.exit(1) if __name__ == "__main__": main() pontos-25.3.2/pontos/release/show.py000066400000000000000000000147611476255566300174550ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later import json from argparse import Namespace from enum import IntEnum, auto from typing import Optional from pontos.enum import StrEnum from pontos.errors import PontosError from pontos.git import Git from pontos.github.actions import ActionIO 
from pontos.release.command import Command from pontos.release.helper import ReleaseType, get_next_release_version from pontos.terminal import Terminal from pontos.typing import SupportsStr from pontos.version import Version, VersionError from pontos.version.helper import get_last_release_version from pontos.version.schemes import VersioningScheme class ShowReleaseReturnValue(IntEnum): """ Possible return values of ReleaseCommand """ SUCCESS = 0 NO_LAST_RELEASE_VERSION = auto() NO_RELEASE_VERSION = auto() class OutputFormat(StrEnum): ENV = "env" JSON = "json" GITHUB_ACTION = "github-action" class ShowReleaseCommand(Command): def __init__(self, *, terminal: Terminal, error_terminal: Terminal) -> None: super().__init__(terminal=terminal, error_terminal=error_terminal) self.git = Git() def run( # type: ignore[override] self, *, output_format: OutputFormat = OutputFormat.ENV, versioning_scheme: VersioningScheme, release_type: ReleaseType, release_version: Optional[Version], release_series: Optional[str] = None, git_tag_prefix: Optional[str] = None, ) -> int: git_tag_prefix = git_tag_prefix or "" try: last_release_version = get_last_release_version( parse_version=versioning_scheme.parse_version, git_tag_prefix=git_tag_prefix, tag_name=( f"{git_tag_prefix}{release_series}.*" if release_series else None ), ) except PontosError as e: last_release_version = None self.print_warning(f"Could not determine last release version. {e}") if not last_release_version and not release_version: self.print_error("Unable to determine last release version.") return ShowReleaseReturnValue.NO_LAST_RELEASE_VERSION calculator = versioning_scheme.calculator() try: release_version = get_next_release_version( last_release_version=last_release_version, calculator=calculator, release_type=release_type, release_version=release_version, ) except VersionError as e: self.print_error(f"Unable to determine release version. 
{e}") return ShowReleaseReturnValue.NO_RELEASE_VERSION if last_release_version: last_release_version_dict = { "last_release_version": str(last_release_version), "last_release_version_major": last_release_version.major, "last_release_version_minor": last_release_version.minor, "last_release_version_patch": last_release_version.patch, } else: last_release_version_dict = { "last_release_version": "", "last_release_version_major": "", "last_release_version_minor": "", "last_release_version_patch": "", } if output_format == OutputFormat.JSON: release_dict = { "release_version": str(release_version), "release_version_major": release_version.major, "release_version_minor": release_version.minor, "release_version_patch": release_version.patch, } release_dict.update(last_release_version_dict) self.terminal.print(json.dumps(release_dict, indent=2)) elif output_format == OutputFormat.GITHUB_ACTION: with ActionIO.out() as output: output.write( "last_release_version", last_release_version_dict["last_release_version"], ) output.write( "last_release_version_major", last_release_version_dict["last_release_version_major"], ) output.write( "last_release_version_minor", last_release_version_dict["last_release_version_minor"], ) output.write( "last_release_version_patch", last_release_version_dict["last_release_version_patch"], ) output.write("release_version_major", release_version.major) output.write("release_version_minor", release_version.minor) output.write("release_version_patch", release_version.patch) output.write("release_version", release_version) else: self.terminal.print( "LAST_RELEASE_VERSION=" f"{last_release_version_dict['last_release_version']}" ) self.terminal.print( "LAST_RELEASE_VERSION_MAJOR=" f"{last_release_version_dict['last_release_version_major']}" ) self.terminal.print( "LAST_RELEASE_VERSION_MINOR=" f"{last_release_version_dict['last_release_version_minor']}" ) self.terminal.print( "LAST_RELEASE_VERSION_PATCH=" 
f"{last_release_version_dict['last_release_version_patch']}" ) self.terminal.print(f"RELEASE_VERSION={release_version}") self.terminal.print( f"RELEASE_VERSION_MAJOR={release_version.major}" ) self.terminal.print( f"RELEASE_VERSION_MINOR={release_version.minor}" ) self.terminal.print( f"RELEASE_VERSION_PATCH={release_version.patch}" ) return ShowReleaseReturnValue.SUCCESS def show( args: Namespace, terminal: Terminal, error_terminal: Terminal, **_kwargs, ) -> SupportsStr: return ShowReleaseCommand( terminal=terminal, error_terminal=error_terminal, ).run( versioning_scheme=args.versioning_scheme, release_type=args.release_type, release_version=args.release_version, git_tag_prefix=args.git_tag_prefix, release_series=args.release_series, output_format=args.output_format, ) pontos-25.3.2/pontos/release/sign.py000066400000000000000000000300521476255566300174240ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import asyncio import hashlib import subprocess from argparse import Namespace from asyncio.subprocess import Process from enum import IntEnum, auto from os import PathLike from pathlib import Path from typing import AsyncContextManager, Optional, SupportsInt, Union import httpx from rich.progress import Progress as RichProgress from rich.progress import TextColumn from pontos.errors import PontosError from pontos.github.api import GitHubAsyncRESTApi from pontos.helper import AsyncDownloadProgressIterable from pontos.release.command import AsyncCommand from pontos.terminal import Terminal from pontos.version import Version from pontos.version.helper import get_last_release_version from pontos.version.schemes import VersioningScheme from .helper import repository_split class SignReturnValue(IntEnum): """ Possible return values of SignCommand """ SUCCESS = 0 TOKEN_MISSING = auto() NO_RELEASE_VERSION = auto() NO_RELEASE = auto() UPLOAD_ASSET_ERROR = auto() SIGNATURE_GENERATION_FAILED = auto() 
INVALID_REPOSITORY = auto() class SignatureError(PontosError): """ Error while creating a signature """ async def cmd_runner(*args: Union[str, PathLike[str]]) -> Process: return await asyncio.create_subprocess_exec( *args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) class SignCommand(AsyncCommand): """ A CLI command for signing a release Args: terminal: A Terminal for output """ async def _async_download_progress( self, rich_progress: RichProgress, progress: AsyncDownloadProgressIterable[bytes], destination: Path, ) -> None: with destination.open("wb") as f: task_description = f"Downloading [blue]{progress.url}" task_id = rich_progress.add_task( task_description, total=progress.length, sha256="", ) sha256 = hashlib.sha256() async for content, percent in progress: rich_progress.advance(task_id, percent or 1) f.write(content) sha256.update(content) rich_progress.update( task_id, total=1, completed=1, sha256=sha256.hexdigest() ) async def download_zip( self, rich_progress: RichProgress, github: GitHubAsyncRESTApi, destination: Path, repo: str, git_version: str, ) -> Path: async with github.releases.download_release_zip( repo, git_version ) as download: await self._async_download_progress( rich_progress, download, destination ) return destination async def download_tar( self, rich_progress: RichProgress, github: GitHubAsyncRESTApi, destination: Path, repo: str, git_version: str, ) -> Path: async with github.releases.download_release_tarball( repo, git_version ) as download: await self._async_download_progress( rich_progress, download, destination ) return destination async def download_asset( self, rich_progress: RichProgress, name: str, download_cm: AsyncContextManager[AsyncDownloadProgressIterable[bytes]], ) -> Path: file_path = Path(name) async with download_cm as iterator: await self._async_download_progress( rich_progress, iterator, file_path ) return file_path async def sign_file( self, file_path: Path, signing_key: str, passphrase: Optional[str] ) -> None: 
self.terminal.info(f"Signing {file_path}") if passphrase: process = await cmd_runner( "gpg", "--pinentry-mode", "loopback", "--default-key", signing_key, "--yes", "--detach-sign", "--passphrase", passphrase, "--armor", file_path, ) else: process = await cmd_runner( "gpg", "--default-key", signing_key, "--yes", "--detach-sign", "--armor", file_path, ) _, stderr = await process.communicate() if process.returncode: raise SignatureError( f"Could not create signature for {file_path}. " f"{stderr.decode(errors='replace')}" ) async def async_run( # type: ignore[override] self, *, token: str, repository: str, versioning_scheme: VersioningScheme, signing_key: str, passphrase: str, dry_run: Optional[bool] = False, git_tag_prefix: Optional[str], release_version: Optional[Version], release_series: Optional[str] = None, ) -> SignReturnValue: """ Sign a release Args: token: A token for creating a release on GitHub repository: GitHub repository (owner/name). Overrides space and project. versioning_scheme: The versioning scheme to use for version parsing and calculation dry_run: True to not upload the signature files git_tag_prefix: An optional prefix to use for handling a git tag from the release version. release_version: Optional release version to use. If not set the current version will be determined from the project. signing_key: A GPG key ID to use for creating signatures. passphrase: Passphrase for the signing key release_series: Optional release series to use. For example: "1.2", "2", "23". """ if not token and not dry_run: # dry run doesn't upload assets. therefore a token MAY NOT be # required # for public repositories. self.print_error( "Token is missing. The GitHub token is required to upload " "signature files." 
) return SignReturnValue.TOKEN_MISSING self.terminal.info(f"Using versioning scheme {versioning_scheme.name}") try: _, project = repository_split(repository) except ValueError as e: self.print_error(str(e)) return SignReturnValue.INVALID_REPOSITORY try: release_version = ( release_version if release_version is not None else get_last_release_version( versioning_scheme.parse_version, git_tag_prefix=git_tag_prefix, tag_name=( f"{git_tag_prefix}{release_series}.*" if release_series else None ), ) ) except PontosError as e: self.print_error(f"Could not determine release version. {e}") return SignReturnValue.NO_RELEASE_VERSION if not release_version: return SignReturnValue.NO_RELEASE_VERSION git_version: str = f"{git_tag_prefix}{release_version}" async with GitHubAsyncRESTApi(token=token) as github: if not await github.releases.exists(repository, git_version): self.print_error( f"Release version {git_version} does not exist." ) return SignReturnValue.NO_RELEASE tasks = [] zip_destination = Path(f"{project}-{release_version}.zip") tarball_destination = Path(f"{project}-{release_version}.tar.gz") # terminal can be a NullTerminal here too that doesn't have a # progress. 
this needs to be fixed and the type ignore removed # afterwards with self.terminal.progress( # type: ignore[attr-defined] additional_columns=[ TextColumn("[progress.description]{task.fields[sha256]}"), ] ) as rich_progress: tasks.append( asyncio.create_task( self.download_zip( rich_progress, github, zip_destination, repository, git_version, ) ) ) tasks.append( asyncio.create_task( self.download_tar( rich_progress, github, tarball_destination, repository, git_version, ) ) ) # pylint: disable=line-too-long async for ( name, download_cm, ) in github.releases.download_release_assets( # noqa: E501 repository, git_version, ): tasks.append( asyncio.create_task( self.download_asset( rich_progress, name, download_cm ) ) ) file_paths = await asyncio.gather(*tasks) tasks = [ asyncio.create_task( self.sign_file(file_path, signing_key, passphrase) ) for file_path in file_paths ] done, pending = await asyncio.wait( tasks, return_when=asyncio.FIRST_EXCEPTION ) has_error = False for task in done: try: await task except (asyncio.CancelledError, asyncio.InvalidStateError): pass except SignatureError as e: self.print_error(e) has_error = True for task in pending: # we had an error try: task.cancel() await task except asyncio.CancelledError: pass if has_error: return SignReturnValue.SIGNATURE_GENERATION_FAILED if dry_run: return SignReturnValue.SUCCESS upload_files = [ (Path(f"{str(p)}.asc"), "application/pgp-signature") for p in file_paths ] self.terminal.info( f"Uploading assets: {[str(p[0]) for p in upload_files]}" ) try: # pylint: disable=line-too-long async for ( uploaded_file ) in github.releases.upload_release_assets( # noqa: E501 repository, git_version, upload_files ): self.terminal.ok(f"Uploaded: {uploaded_file}") except httpx.HTTPStatusError as e: self.print_error(f"Failed uploading asset {e}.") return SignReturnValue.UPLOAD_ASSET_ERROR return SignReturnValue.SUCCESS def sign( args: Namespace, *, terminal: Terminal, error_terminal: Terminal, token: Optional[str], **_kwargs, 
) -> SupportsInt: return SignCommand(terminal=terminal, error_terminal=error_terminal).run( token=token, dry_run=args.dry_run, repository=args.repository, versioning_scheme=args.versioning_scheme, git_tag_prefix=args.git_tag_prefix, release_version=args.release_version, signing_key=args.signing_key, passphrase=args.passphrase, release_series=args.release_series, ) pontos-25.3.2/pontos/terminal/000077500000000000000000000000001476255566300163055ustar00rootroot00000000000000pontos-25.3.2/pontos/terminal/__init__.py000066400000000000000000000004061476255566300204160ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .null import NullTerminal from .rich import RichTerminal from .terminal import Terminal __all__ = ( "Terminal", "NullTerminal", "RichTerminal", ) pontos-25.3.2/pontos/terminal/null.py000066400000000000000000000021771476255566300176400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from contextlib import contextmanager from typing import Any, Generator from pontos.helper import DownloadProgressIterable from .terminal import Terminal class NullTerminal(Terminal): """A terminal implementation to keep the terminal quiet""" @contextmanager def indent(self, indentation: int = 4) -> Generator[None, None, None]: yield def out(self, *messages: Any, **kwargs: Any) -> None: pass def print(self, *messages: Any, **kwargs: Any) -> None: pass def ok(self, *messages: Any, **kwargs: Any) -> None: pass def fail(self, *messages: Any, **kwargs: Any) -> None: pass def error(self, *messages: Any, **kwargs: Any) -> None: pass def warning(self, *messages: Any, **kwargs: Any) -> None: pass def info(self, *messages: Any, **kwargs: Any) -> None: pass def bold_info(self, *messages: Any, **kwargs: Any) -> None: pass def download_progress(self, progress: DownloadProgressIterable) -> None: progress.run() 
pontos-25.3.2/pontos/terminal/rich.py000066400000000000000000000111501476255566300176020ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import IO, Any, Callable, Iterable, Optional from rich.console import Console, RenderableType from rich.padding import Padding from rich.progress import ( BarColumn, Progress, ProgressColumn, SpinnerColumn, Task, TaskProgressColumn, TextColumn, ) from pontos.helper import DownloadProgressIterable from .terminal import Signs, Terminal def red(text: str) -> str: return f"[red]{text}[/red]" def yellow(text: str) -> str: return f"[yellow]{text}[/yellow]" def cyan(text: str) -> str: return f"[cyan]{text}[/cyan]" def green(text: str) -> str: return f"[green]{text}[/green]" def white(text: str) -> str: return f"[white]{text}[/white]" class PaddingColumn(ProgressColumn): def __init__(self, indent: int, table_column=None): self._padding = Padding.indent("", indent) super().__init__(table_column=table_column) def render(self, task: Task) -> RenderableType: return self._padding class RichTerminal(Terminal): """ A Terminal based on `rich `_. """ def __init__( self, file: Optional[IO[str]] = None, ) -> None: """ Create a new RichTerminal Args: file: A file object where the output should write to. Default is stdout. 
""" super().__init__() self._console = Console(file=file) def _indent_message(self): return " " * self._indent def _print_status( self, *messages: Any, status: Signs, color: Callable, **kwargs: Any, ): self._console.print( self._indent_message(), color(status), *messages, **kwargs ) def get_progress_default_columns(self) -> Iterable[ProgressColumn]: return ( PaddingColumn(self._indent), SpinnerColumn(), TextColumn("[progress.description]{task.description}"), BarColumn(), TaskProgressColumn(), ) def progress( self, *, columns: Optional[Iterable[ProgressColumn]] = None, additional_columns: Optional[Iterable[ProgressColumn]] = None, **kwargs, ) -> Progress: kwargs["console"] = self._console columns = columns or self.get_progress_default_columns() if additional_columns: columns = *columns, *additional_columns return Progress( *columns, **kwargs, ) def out(self, *messages: Any, **kwargs: Any) -> None: kwargs["highlight"] = False self._console.out(self._indent_message(), *messages, **kwargs) def print(self, *messages: Any, **kwargs: Any) -> None: self._console.print(self._indent_message(), *messages, **kwargs) def ok(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.OK, "color": green}) self._print_status(*messages, **kwargs) def fail(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.FAIL, "color": red}) self._print_status(*messages, **kwargs) def error(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.ERROR, "color": red}) self._print_status(*messages, **kwargs) def warning(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.WARNING, "color": yellow}) self._print_status(*messages, **kwargs) def info(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.INFO, "color": cyan}) self._print_status(*messages, **kwargs) def bold_info(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.INFO, "color": cyan, "style": "bold"}) 
self._print_status(*messages, **kwargs) def download_progress(self, progress: DownloadProgressIterable) -> None: with self.progress() as rich_progress: task_description = f"Downloading [blue]{progress.url}" if progress.length: task_id = rich_progress.add_task( task_description, total=progress.length ) for percent in progress: rich_progress.advance(task_id, percent) # type: ignore[arg-type] # noqa: E501 else: task_id = rich_progress.add_task(task_description, total=None) for _ in progress: rich_progress.advance(task_id) rich_progress.update(task_id, total=1, completed=1) pontos-25.3.2/pontos/terminal/terminal.py000066400000000000000000000211301476255566300204670ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2019-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from abc import ABC, abstractmethod from contextlib import contextmanager from enum import Enum from pathlib import Path from shutil import get_terminal_size from typing import Any, Callable, Generator, Optional import colorful as cf # type: ignore from pontos.helper import DownloadProgressIterable TERMINAL_SIZE_FALLBACK = (80, 24) # use a small standard size as fallback class Signs(Enum): FAIL = "\N{HEAVY MULTIPLICATION X}" ERROR = "\N{MULTIPLICATION SIGN}" WARNING = "\N{WARNING SIGN}" OK = "\N{CHECK MARK}" INFO = "\N{INFORMATION SOURCE}" NONE = " " def __str__(self): return f"{self.value}" STATUS_LEN = 2 class Terminal(ABC): """ Abstract base class representing a terminal console """ def __init__(self) -> None: super().__init__() self._indent = 0 @contextmanager def indent(self, indentation: int = 4) -> Generator[None, None, None]: """ A context manager for indenting output using spaces Example: .. code-block:: python with terminal.indent(): terminal.print("...") Args: indentation: Number of spaces to be used for indentation. By default 4. 
""" current_indent = self._indent self._add_indent(indentation) yield self._indent = current_indent def _add_indent(self, indentation: int = 4) -> None: self._indent += indentation @abstractmethod def out(self, *messages: Any, **kwargs: Any) -> None: """ Print messages without formatting. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def print(self, *messages: Any, **kwargs: Any) -> None: """ Print messages. Possibly formatting is applied. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def ok(self, *messages: Any, **kwargs: Any) -> None: """ Print a success message. Possibly formatting is applied. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def fail(self, *messages: Any, **kwargs: Any) -> None: """ Print a failure message. Possibly formatting is applied. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def error(self, *messages: Any, **kwargs: Any) -> None: """ Print an error message. Possibly formatting is applied. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def warning(self, *messages: Any, **kwargs: Any) -> None: """ Print a warning message. Possibly formatting is applied. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def info(self, *messages: Any, **kwargs: Any) -> None: """ Print an info message. Possibly formatting is applied. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def bold_info(self, *messages: Any, **kwargs: Any) -> None: """ Print an info message with bold text. 
Possibly formatting is applied. Args: *messages: Arguments to print. **kwargs: Keyword arguments forwarded to the underlying implementation. """ @abstractmethod def download_progress(self, progress: DownloadProgressIterable) -> None: """ Display a download progress """ class ConsoleTerminal(Terminal): """ A simple Terminal using colorful internally for highlighting """ # Keep arguments for backwards compatibility but ignore them # pylint: disable=unused-argument def __init__(self, *, verbose: int = 1, log_file: Optional[Path] = None): super().__init__() @staticmethod def get_width() -> int: """ Get the width of the terminal window """ width, _ = get_terminal_size(TERMINAL_SIZE_FALLBACK) return width def _print_status( self, *messages: Any, status: Signs, color: Callable, style: Callable = cf.reset, new_line: bool = True, **kwargs: Any, ) -> None: width = self.get_width() offset = self._indent + STATUS_LEN usable_width = width - offset # deal with existing newlines, to avoid breaking the formatting # done by the terminal message = "".join(messages) processed_messages = message.split("\n") output = self._format_message( message=processed_messages[0], usable_width=usable_width, offset=offset, first=True, ) if len(processed_messages) > 0: for msg in processed_messages[1:]: output += "\n" output += self._format_message( message=msg, usable_width=usable_width, offset=offset, ) if new_line: print(style(f"{color(status)} {output}"), **kwargs) else: kwargs.update({"end": "", "flush": True}) print(style(f"{color(status)} {output}"), **kwargs) def _format_message( self, message: str, usable_width: int, offset: int, *, first: bool = False, ) -> str: formatted_message = "" if first: formatted_message += " " * self._indent else: formatted_message += " " * offset while usable_width < len(message): part = message[:usable_width] message = message[usable_width:] formatted_message += f"{part}" if len(message) > 0: formatted_message += f'\n{" " * offset}' formatted_message += 
f"{message}" return formatted_message def out(self, *messages: Any, **kwargs: Any) -> None: self.print(*messages, **kwargs) def print(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.NONE, "color": cf.white}) self._print_status(*messages, **kwargs) def ok(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.OK, "color": cf.green}) self._print_status(*messages, **kwargs) def fail(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.FAIL, "color": cf.red}) self._print_status(*messages, **kwargs) def error(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.ERROR, "color": cf.red}) self._print_status(*messages, **kwargs) def warning(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.WARNING, "color": cf.yellow}) self._print_status(*messages, **kwargs) def info(self, *messages: Any, **kwargs: Any) -> None: kwargs.update({"status": Signs.INFO, "color": cf.cyan}) self._print_status(*messages, **kwargs) def bold_info(self, *messages: Any, **kwargs: Any) -> None: kwargs.update( {"status": Signs.INFO, "color": cf.cyan, "style": cf.bold} ) self._print_status(*messages, **kwargs) def download_progress(self, progress: DownloadProgressIterable) -> None: spinner = ["-", "\\", "|", "/"] if progress.length: for percent in progress: done = int(50 * percent) if percent else 0 self.out( f"\r[{'=' * done}{' ' * (50-done)}]", end="", flush=True ) else: i = 0 for _ in progress: i = i + 1 if i == 4: i = 0 self.out(f"\r[{spinner[i]}]", end="", flush=True) self.out(f"\r[{Signs.OK}]{' ' * 50}", end="", flush=True) pontos-25.3.2/pontos/testing/000077500000000000000000000000001476255566300161475ustar00rootroot00000000000000pontos-25.3.2/pontos/testing/__init__.py000066400000000000000000000152341476255566300202650ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """ A module containing classes 
and functions mostly useful for creating unit tests """ import os import tempfile from contextlib import contextmanager from pathlib import Path from typing import ( Any, AsyncIterator, Awaitable, Generator, Iterable, Optional, Union, ) from pontos.git._git import exec_git from pontos.helper import add_sys_path, ensure_unload_module, unload_module __all__ = ( "AsyncIteratorMock", "add_sys_path", "ensure_unload_module", "temp_directory", "temp_file", "temp_git_repository", "temp_python_module", "unload_module", ) @contextmanager def temp_directory( *, change_into: bool = False, add_to_sys_path: bool = False ) -> Generator[Path, None, None]: """ Context Manager to create a temporary directory Args: change_into: Set the created temporary as the current working directory. The behavior of the current working directory when leaving the context manager is undefined. add_to_sys_path: Add the created temporary directory to the directories for searching for Python modules Returns: A path to the created temporary directory Example: .. code-block:: python from pontos.testing import temp_directory with temp_directory(change_into=True) as tmp: new_file = tmp / "test.txt" """ temp_dir = tempfile.TemporaryDirectory() dir_path = Path(temp_dir.name) if change_into: try: old_cwd = Path.cwd() except FileNotFoundError: old_cwd = Path.home() os.chdir(dir_path) try: if add_to_sys_path: with add_sys_path(dir_path): yield Path(dir_path) else: yield Path(dir_path) finally: if change_into: try: os.chdir(old_cwd) finally: temp_dir.cleanup() else: temp_dir.cleanup() @contextmanager def temp_git_repository( *, user_name: str = "Max Mustermann", user_email: str = "max.mustermann@example.com", branch: str = "main", ) -> Generator[Path, None, None]: """ Context Manager to create a temporary git repository on the filesystem Args: user_name: User name to configure in the repository. Default: Max Mustermann user_email: Email address of the user to configure in the repository. 
Default: max.mustermann@example.com branch: Branch name to create. Default: main Returns: A path to the created temporary git repository directory Example: .. code-block:: python from pontos.testing import temp_git_repository with temp_git_repository() as repo: new_file = repo / "foo.txt" new_file.write_text("Lorem Ipsum") exec_git("add", "foo.txt") """ temp_dir = tempfile.TemporaryDirectory() temp_path = Path(temp_dir.name) try: old_cwd = Path.cwd() except FileNotFoundError: old_cwd = Path.home() os.chdir(temp_path) exec_git("init", "-b", branch) exec_git("config", "--local", "user.email", user_email) exec_git("config", "--local", "user.name", user_name) try: yield temp_path finally: try: os.chdir(old_cwd) finally: temp_dir.cleanup() @contextmanager def temp_file( content: Optional[Union[str, bytes]] = None, *, name: str = "test.toml", change_into: bool = False, ) -> Generator[Path, None, None]: """ A Context Manager to create a temporary file within a new temporary directory. The temporary file and directory are removed when the context is exited. Args: content: Content to write into the temporary file. name: Name of the temporary file. "test.toml" by default. change_into: Adjust the current working directory to the temporary directory. Returns: A path to the created temporary file Example: .. code-block:: python from pontos.testing import temp_file with temp_file("Lorem Ipsum", name="foo.txt") as fpath: """ with temp_directory(change_into=change_into) as tmp_dir: test_file = tmp_dir / name if content: if isinstance(content, bytes): test_file.write_bytes(content) else: test_file.write_text(content, encoding="utf8") else: test_file.touch() yield test_file @contextmanager def temp_python_module( content: str, *, name: str = "foo", change_into: bool = False ) -> Generator[Path, None, None]: """ A Context Manager to create a new Python module in a temporary directory. 
The temporary directory will be added to the module search path and removed from the search path when the context is exited. Also it is ensured that the module is unloaded if the context is exited. Args: content: Python code to write into the temporary module. name: Name of the new Python module. By default: "foo". change_into: Adjust the current working directory to the temporary directory. Returns: A path to the created temporary Python module file Example: .. code-block:: python from pontos.testing import temp_python_module with temp_python_module( "def hello(value):\\n print(f'Hello {value}')", name="world" ) as python_module_path: from world import hello hello("World") """ with ( temp_directory( add_to_sys_path=True, change_into=change_into ) as tmp_dir, ensure_unload_module(name), ): test_file = tmp_dir / f"{name}.py" test_file.write_text(content, encoding="utf8") yield test_file class AsyncIteratorMock(AsyncIterator): """ A class to mock an async iterator from an iterable like a list Args: iterable: Iterable to return values from Example: .. 
code-block:: python from pontos.testing import AsyncIteratorMock values = [1, 2, 3] mock = AsyncIteratorMock(values) async for value in mock: print(value) """ def __init__(self, iterable: Iterable[Any]) -> None: self.iterator = iter(iterable) async def __anext__(self) -> Awaitable[Any]: try: return next(self.iterator) except StopIteration: raise StopAsyncIteration() from None pontos-25.3.2/pontos/typing/000077500000000000000000000000001476255566300160045ustar00rootroot00000000000000pontos-25.3.2/pontos/typing/__init__.py000066400000000000000000000005211476255566300201130ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from abc import abstractmethod from typing import Protocol, runtime_checkable @runtime_checkable class SupportsStr(Protocol): """ A protocol for classes supporting __str__ """ @abstractmethod def __str__(self) -> str: pass pontos-25.3.2/pontos/updateheader/000077500000000000000000000000001476255566300171255ustar00rootroot00000000000000pontos-25.3.2/pontos/updateheader/__init__.py000066400000000000000000000002251476255566300212350ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .updateheader import main __all__ = ["main"] pontos-25.3.2/pontos/updateheader/__main__.py000066400000000000000000000003261476255566300212200ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # # pylint: disable=invalid-name from pontos import updateheader if __name__ == "__main__": updateheader.main() pontos-25.3.2/pontos/updateheader/_parser.py000066400000000000000000000065571476255566300211470ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from argparse import ArgumentParser, FileType, Namespace from datetime import datetime from typing import Optional, Sequence import shtab SUPPORTED_LICENSES 
= [ "AGPL-3.0-or-later", "GPL-2.0-only", "GPL-2.0-or-later", "GPL-3.0-or-later", ] def parse_args(args: Optional[Sequence[str]] = None) -> Namespace: """Parsing the args""" parser = ArgumentParser( description="Update copyright in source file headers.", ) shtab.add_argument_to(parser) parser.add_argument( "--quiet", "-q", action="store_true", help="Don't print messages to the terminal", ) parser.add_argument( "--log-file", dest="log_file", type=str, help="Activate logging using the given file path", ).complete = shtab.FILE # type: ignore[attr-defined] parser.add_argument( "-c", "--changed", action="store_true", default=False, help=( "Update modified year using git log modified year. " "Used instead of --year. If the modified year could not be " "determined via git it falls back to --year." ), ) parser.add_argument( "-y", "--year", default=str(datetime.now().year), help=( "If year is set, modified year will be " "set to the specified year. Default is %(default)s." ), ) parser.add_argument( "-l", "--license", dest="license_id", choices=SUPPORTED_LICENSES, default="GPL-3.0-or-later", help="Use the passed license type. Default is %(default)s", ) parser.add_argument( "--company", default="Greenbone AG", help=( "If a header will be added to file, " "it will be licensed by company. Default is %(default)s" ), ) files_group = parser.add_mutually_exclusive_group(required=True) files_group.add_argument( "-f", "--files", nargs="+", help="Files to update." ).complete = shtab.FILE # type: ignore[attr-defined] files_group.add_argument( "-d", "--directories", nargs="+", help="Directories to find files to update recursively.", ).complete = shtab.DIRECTORY # type: ignore[attr-defined] parser.add_argument( "--exclude-file", help=( "File containing glob patterns for files to " "ignore when finding files to update in a directory. " "Will look for '.pontos-header-ignore' in the directory " "if none is given. 
" "The ignore file should only contain relative paths like *.py," "not absolute as **/*.py" ), type=FileType("r"), ).complete = shtab.FILE # type: ignore[attr-defined] parser.add_argument( "--cleanup", action="store_true", default=False, help="Do a cleanup: Remove lines from outdated header format", ) parser.add_argument( "--single-year", action="store_true", default=False, help=( "If set, will format license headers in from-to year format " "into single (creation) year format. " "Default is %(default)s." ), ) return parser.parse_args(args) pontos-25.3.2/pontos/updateheader/templates/000077500000000000000000000000001476255566300211235ustar00rootroot00000000000000pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/000077500000000000000000000000001476255566300236675ustar00rootroot00000000000000pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.bash000066400000000000000000000001461476255566300263420ustar00rootroot00000000000000#!/bin/bash # SPDX-FileCopyrightText: # # SPDX-License-Identifier: AGPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.c000066400000000000000000000001411476255566300256420ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: AGPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.cmake000066400000000000000000000001321476255566300265000ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: AGPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.go000066400000000000000000000001351476255566300260300ustar00rootroot00000000000000// SPDX-FileCopyrightText: // // SPDX-License-Identifier: AGPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.h000066400000000000000000000001411476255566300256470ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: AGPL-3.0-or-later */ 
pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.js000066400000000000000000000001411476255566300260340ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: AGPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.nasl000066400000000000000000000003401476255566300263560ustar00rootroot00000000000000# SPDX-FileCopyrightText: # Some text descriptions might be excerpted from (a) referenced # source(s), and are Copyright (C) by the respective right holder(s). # # SPDX-License-Identifier: AGPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.po000066400000000000000000000001321476255566300260360ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: AGPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.py000066400000000000000000000001321476255566300260500ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: AGPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.sh000066400000000000000000000001441476255566300260350ustar00rootroot00000000000000#!/bin/sh # SPDX-FileCopyrightText: # # SPDX-License-Identifier: AGPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.ts000066400000000000000000000001411476255566300260460ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: AGPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.tsx000066400000000000000000000001411476255566300262360ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: AGPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.txt000066400000000000000000000001251476255566300262410ustar00rootroot00000000000000SPDX-FileCopyrightText: SPDX-License-Identifier: AGPL-3.0-or-later 
pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.xml000066400000000000000000000001651476255566300262260ustar00rootroot00000000000000 pontos-25.3.2/pontos/updateheader/templates/AGPL-3.0-or-later/template.xsl000066400000000000000000000001651476255566300262340ustar00rootroot00000000000000 pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-only/000077500000000000000000000000001476255566300230215ustar00rootroot00000000000000pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-only/template.c000066400000000000000000000001341476255566300247760ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-2.0-only */ pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-only/template.h000066400000000000000000000001341476255566300250030ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-2.0-only */ pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-only/template.nasl000066400000000000000000000003331476255566300255120ustar00rootroot00000000000000# SPDX-FileCopyrightText: # Some text descriptions might be excerpted from (a) referenced # source(s), and are Copyright (C) by the respective right holder(s). 
# # SPDX-License-Identifier: GPL-2.0-only pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/000077500000000000000000000000001476255566300235655ustar00rootroot00000000000000pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.bash000066400000000000000000000001451476255566300262370ustar00rootroot00000000000000#!/bin/bash # SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-2.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.c000066400000000000000000000001401476255566300255370ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-2.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.cmake000066400000000000000000000001311476255566300263750ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-2.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.h000066400000000000000000000001401476255566300255440ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-2.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.js000066400000000000000000000001401476255566300257310ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-2.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.nasl000066400000000000000000000003371476255566300262620ustar00rootroot00000000000000# SPDX-FileCopyrightText: # Some text descriptions might be excerpted from (a) referenced # source(s), and are Copyright (C) by the respective right holder(s). 
# # SPDX-License-Identifier: GPL-2.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.po000066400000000000000000000001311476255566300257330ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-2.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.py000066400000000000000000000001311476255566300257450ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-2.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.sh000066400000000000000000000001431476255566300257320ustar00rootroot00000000000000#!/bin/sh # SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-2.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.ts000066400000000000000000000001401476255566300257430ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-2.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.tsx000066400000000000000000000001401476255566300261330ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-2.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.txt000066400000000000000000000001241476255566300261360ustar00rootroot00000000000000SPDX-FileCopyrightText: SPDX-License-Identifier: GPL-2.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.xml000066400000000000000000000001641476255566300261230ustar00rootroot00000000000000 pontos-25.3.2/pontos/updateheader/templates/GPL-2.0-or-later/template.xsl000066400000000000000000000001641476255566300261310ustar00rootroot00000000000000 
pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/000077500000000000000000000000001476255566300235665ustar00rootroot00000000000000pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.bash000066400000000000000000000001451476255566300262400ustar00rootroot00000000000000#!/bin/bash # SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.c000066400000000000000000000001401476255566300255400ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.cmake000066400000000000000000000001311476255566300263760ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.go000066400000000000000000000001341476255566300257260ustar00rootroot00000000000000// SPDX-FileCopyrightText: // // SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.h000066400000000000000000000001401476255566300255450ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.js000066400000000000000000000001401476255566300257320ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.nasl000066400000000000000000000003371476255566300262630ustar00rootroot00000000000000# SPDX-FileCopyrightText: # Some text descriptions might be excerpted from (a) referenced # source(s), and are Copyright (C) by the respective right holder(s). 
# # SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.po000066400000000000000000000001311476255566300257340ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.py000066400000000000000000000001311476255566300257460ustar00rootroot00000000000000# SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.sh000066400000000000000000000001431476255566300257330ustar00rootroot00000000000000#!/bin/sh # SPDX-FileCopyrightText: # # SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.ts000066400000000000000000000001401476255566300257440ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.tsx000066400000000000000000000001401476255566300261340ustar00rootroot00000000000000/* SPDX-FileCopyrightText: * * SPDX-License-Identifier: GPL-3.0-or-later */ pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.txt000066400000000000000000000001241476255566300261370ustar00rootroot00000000000000SPDX-FileCopyrightText: SPDX-License-Identifier: GPL-3.0-or-later pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.xml000066400000000000000000000001641476255566300261240ustar00rootroot00000000000000 pontos-25.3.2/pontos/updateheader/templates/GPL-3.0-or-later/template.xsl000066400000000000000000000001641476255566300261320ustar00rootroot00000000000000 pontos-25.3.2/pontos/updateheader/updateheader.py000066400000000000000000000335121476255566300221360ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2019-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # """Script to update the year of last modification in the 
license header of source code files.\n Also it appends a header if it is missing in the file. """ import io import re import sys from dataclasses import dataclass from functools import cache from pathlib import Path from typing import Optional, Sequence, Union from pontos.errors import PontosError from pontos.git import Git from pontos.terminal.null import NullTerminal from pontos.terminal.rich import RichTerminal from ._parser import parse_args SUPPORTED_FILE_TYPES = [ ".bash", ".c", ".h", ".go", ".cmake", ".js", ".nasl", ".po", ".py", ".sh", ".ts", ".tsx", ".txt", ".xml", ".xsl", ] OLD_LINES = [ "# \-\*\- coding: utf\-8 \-\*\-", "This program is free software: you can redistribute it and/or modify", "it under the terms of the GNU Affero General Public License as", "published by the Free Software Foundation, either version 3 of the", "License, or \(at your option\) any later version.", "This program is free software; you can redistribute it and/or", "modify it under the terms of the GNU General Public License", "version 2 as published by the Free Software Foundation.", "This program is free software: you can redistribute it and/or modify", "it under the terms of the GNU General Public License as published by", "the Free Software Foundation, either version 3 of the License, or", "\(at your option\) any later version.", "This program is distributed in the hope that it will be useful,", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "GNU Affero General Public License for more details.", "GNU General Public License for more details.", "You should have received a copy of the GNU Affero General Public License", "You should have received a copy of the GNU General Public License", "along with this program. 
If not, see .", "along with this program; if not, write to the Free Software", "Foundation, Inc\., 51 Franklin St, Fifth Floor, Boston, MA 02110\-1301 USA\.", # noqa: E501 ] def _get_modified_year(f: Path) -> str: """In case of the changed arg, update year to last modified year""" try: ret = Git().log("-1", "--date=format:%Y", str(f), format="%ad")[0] except IndexError: raise PontosError(f'Empty "git log -1" output for {f}.') return ret @dataclass class CopyrightMatch: creation_year: str modification_year: Optional[str] company: str def _find_copyright( line: str, copyright_regex: re.Pattern, ) -> tuple[bool, Union[CopyrightMatch, None]]: """Match the line for the copyright_regex""" copyright_match = re.search(copyright_regex, line) if copyright_match: return ( True, CopyrightMatch( creation_year=copyright_match.group(2), modification_year=copyright_match.group(3), company=copyright_match.group(4), ), ) return False, None def _add_header( suffix: str, license_id: str, company: str, year: str ) -> Union[str, None]: """Tries to add the header to the file. 
Requirements: - file type must be supported - license file must exist """ if suffix in SUPPORTED_FILE_TYPES: root = Path(__file__).parent license_file = root / "templates" / license_id / f"template{suffix}" try: return ( license_file.read_text(encoding="utf-8") .replace("", company) .replace("", year) ) except FileNotFoundError as e: raise e else: raise ValueError def _remove_outdated_lines( content: str, cleanup_regexes: list[re.Pattern] ) -> Optional[str]: """Remove lines that contain outdated copyright header ...""" changed = False splitted_lines = content.splitlines() i = 0 for line in splitted_lines[:20]: if i > 3 and re.match(r"^(([#*]|//) ?$)", line): splitted_lines.pop(i) continue for regex in cleanup_regexes: if regex.match(line): changed = True splitted_lines.pop(i) i = i - 1 break i = i + 1 if changed: new_content = "\n".join(splitted_lines) + "\n" return new_content return None def update_file( file: Path, year: str, license_id: str, company: str, *, cleanup: bool = False, single_year: bool = False, ) -> None: """Function to update the header of the given file Checks if header exists. 
If not it adds an header to that file, otherwise it checks if year is up to date """ copyright_regex = _compile_copyright_regex() cleanup_regexes = _compile_outdated_regex() if cleanup else None try: with file.open("r+") as fp: found = False i = 10 # assume that copyright is in the first 10 lines while not found and i > 0: line = fp.readline() if line == "": i = 0 continue found, copyright_match = _find_copyright( line=line, copyright_regex=copyright_regex ) i = i - 1 # header not found, add header if i == 0 and not found: try: header = _add_header( file.suffix, license_id, company, year, ) if header: fp.seek(0) # back to beginning of file rest_of_file = fp.read() fp.seek(0) fp.write(header + "\n" + rest_of_file) print(f"{file}: Added license header.") return except ValueError: print( f"{file}: No license header for the" f" format {file.suffix} found.", ) except FileNotFoundError: print( f"{file}: License file for {license_id} " "is not existing." ) return # replace found header and write it to file if copyright_match: # use different target license formats depending on provided single_year argument if single_year: copyright_term = ( f"SPDX-FileCopyrightText: " f"{copyright_match.creation_year} " f"{company}" ) else: copyright_term = ( f"SPDX-FileCopyrightText: " f"{copyright_match.creation_year}" f"-{year} {company}" ) with_multi_year = ( copyright_match.creation_year and copyright_match.modification_year ) with_single_year_outdated = ( not copyright_match.modification_year and int(copyright_match.creation_year) < int(year) ) with_multi_year_outdated = False if with_multi_year: # assert to silence mypy assert isinstance(copyright_match.modification_year, str) with_multi_year_outdated = int( copyright_match.modification_year ) < int(year) if single_year and with_multi_year: _substitute_license_text( fp, line, copyright_regex, copyright_term ) print( f"{file}: Changed License Header Copyright Year format to single year " f"{copyright_match.creation_year}-{year} -> " 
f"{copyright_match.creation_year}" ) elif not single_year and ( with_multi_year_outdated or with_single_year_outdated ): _substitute_license_text( fp, line, copyright_regex, copyright_term ) print( f"{file}: Changed License Header Copyright Year " f"{copyright_match.modification_year} -> " f"{year}" ) else: print(f"{file}: License Header is ok.") except FileNotFoundError as e: print(f"{file}: File is not existing.") raise e except UnicodeDecodeError as e: print(f"{file}: Ignoring binary file.") raise e # old header existing - cleanup? if cleanup_regexes: old_content = file.read_text(encoding="utf-8") new_content = _remove_outdated_lines( content=old_content, cleanup_regexes=cleanup_regexes ) if new_content: file.write_text(new_content, encoding="utf-8") print(f"{file}: Cleaned up!") def _substitute_license_text( fp: io.TextIOWrapper, line: str, copyright_regex: re.Pattern, copyright_term: str, ) -> None: """Substitute the old license text in file fp, starting on provided line, with the new one provided in copyright_term""" new_line = re.sub(copyright_regex, copyright_term, line) fp_write = fp.tell() - len(line) # save position to insert rest_of_file = fp.read() fp.seek(fp_write) fp.write(new_line) fp.write(rest_of_file) # in some cases we replace "YYYY - YYYY" with "YYYY-YYYY" # resulting in 2 characters left at the end of the file # so we truncate the file, just in case! fp.truncate() def _get_exclude_list( exclude_file: Path, directories: list[Path] ) -> list[Path]: """Tries to get the list of excluded files / directories. If a file is given, it will be used. Otherwise it will be searched in the executed root path. 
The ignore file should only contain relative paths like *.py, not absolute as **/*.py """ if exclude_file is None: exclude_file = Path(".pontos-header-ignore") if not exclude_file.is_file(): return [] exclude_lines = exclude_file.read_text(encoding="utf-8").splitlines() expanded_globs = [ directory.rglob(line.strip()) for directory in directories for line in exclude_lines if line ] exclude_list = [] for glob_paths in expanded_globs: for path in glob_paths: if path.is_dir(): for efile in path.rglob("*"): exclude_list.append(efile.absolute()) else: exclude_list.append(path.absolute()) return exclude_list @cache def _compile_outdated_regex() -> list[re.Pattern]: """prepare regex patterns to remove old copyright lines""" return [re.compile(rf"^(([#*]|//) ?)?{line}") for line in OLD_LINES] @cache def _compile_copyright_regex() -> re.Pattern: """prepare the copyright regex""" c_str = r"(SPDX-FileCopyrightText:|[Cc]opyright)" d_str = r"(19[0-9]{2}|20[0-9]{2})" return re.compile(rf"{c_str}.*? {d_str}?-? ?{d_str}? 
(.+)") def main(args: Optional[Sequence[str]] = None) -> None: parsed_args = parse_args(args) exclude_list = [] year: str = parsed_args.year license_id: str = parsed_args.license_id company: str = parsed_args.company changed: bool = parsed_args.changed quiet: bool = parsed_args.quiet cleanup: bool = parsed_args.cleanup single_year: bool = parsed_args.single_year if quiet: term: Union[NullTerminal, RichTerminal] = NullTerminal() else: term = RichTerminal() term.bold_info("pontos-update-header") if parsed_args.directories: if isinstance(parsed_args.directories, list): directories = [ Path(directory) for directory in parsed_args.directories ] else: directories = [Path(parsed_args.directories)] # get file paths to exclude exclude_list = _get_exclude_list(parsed_args.exclude_file, directories) # get files to update files = [ Path(file) for directory in directories for file in directory.rglob("*") if file.is_file() ] elif parsed_args.files: if isinstance(parsed_args.files, list): files = [Path(name) for name in parsed_args.files] else: files = [Path(parsed_args.files)] else: # should never happen term.error("Specify files to update!") sys.exit(1) for file in files: try: if file.absolute() in exclude_list: term.warning(f"{file}: Ignoring file from exclusion list.") else: if changed: try: year = _get_modified_year(file) except PontosError: term.warning( f"{file}: Could not get date of last modification" f" via git, using {year} instead." 
) update_file( file, year, license_id, company, cleanup=cleanup, single_year=single_year, ) except (FileNotFoundError, UnicodeDecodeError, ValueError): continue if __name__ == "__main__": main() pontos-25.3.2/pontos/version/000077500000000000000000000000001476255566300161575ustar00rootroot00000000000000pontos-25.3.2/pontos/version/__init__.py000066400000000000000000000007121476255566300202700ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from .__version__ import __version__ from ._calculator import VersionCalculator from ._errors import VersionError from ._main import main from ._version import ParseVersionFuncType, Version, VersionUpdate __all__ = ( "__version__", "VersionError", "ParseVersionFuncType", "Version", "VersionCalculator", "VersionUpdate", "main", ) pontos-25.3.2/pontos/version/__main__.py000066400000000000000000000003011476255566300202430ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # # pylint: disable=invalid-name from ._main import main if __name__ == "__main__": main() pontos-25.3.2/pontos/version/__version__.py000066400000000000000000000001471476255566300210140ustar00rootroot00000000000000# pylint: disable=invalid-name # THIS IS AN AUTOGENERATED FILE. DO NOT TOUCH! 
__version__ = "25.3.2" pontos-25.3.2/pontos/version/_calculator.py000066400000000000000000000202141476255566300210200ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from abc import ABC, abstractmethod from datetime import datetime from typing import Type from ._errors import VersionError from ._version import Version class VersionCalculator(ABC): """ An abstract base class for calculating a next version from a version """ version_cls: Type[Version] @classmethod def version_from_string(cls, version: str) -> Version: """ Create a version from a version string Args: version: Version string to parse Raises: VersionError: If the version string is invalid. Returns: A new version instance """ return cls.version_cls.from_string(version) @classmethod def next_calendar_version(cls, current_version: Version) -> Version: """ Find the correct next calendar version by checking latest version and the today's date Raises: VersionError: If version is invalid. """ today = datetime.today() current_year_short = today.year % 100 if current_version.major > 2000: # version expected to be YYYY.MM.P current_year = today.year else: # version expected to be YY.MM.P current_year = current_year_short if current_version.major < current_year or ( current_version.major == current_year and current_version.minor < today.month ): return cls.version_from_string(f"{current_year}.{today.month}.0") if ( current_version.major == current_year and current_version.minor == today.month ): if current_version.dev is None: release_version = cls.version_from_string( f"{current_year}.{today.month}." f"{current_version.patch + 1}" ) else: release_version = cls.version_from_string( f"{current_year}.{today.month}." f"{current_version.patch}" ) return release_version else: raise VersionError( f"'{current_version}' is higher than " f"'{current_year}.{today.month}'." 
) @classmethod def next_major_version(cls, current_version: Version) -> Version: """ Get the next major version from a valid version Examples: "1.2.3" will return "2.0.0" "1.2.3.dev1" will return "1.2.3" "1.2.3-alpha1" will return "1.2.3" "1.0.0" will return "2.0.0" "1.0.0-a1" will return "1.0.0" "1.0.0.dev1" will return "1.0.0" "0.5.0-a1" will return "1.0.0" "0.5.0.dev1" will return "1.0.0" """ if ( (current_version.is_pre_release or current_version.is_dev_release) and current_version.patch == 0 and current_version.minor == 0 ): return cls.version_from_string( f"{current_version.major}.{current_version.minor}." f"{current_version.patch}" ) return cls.version_from_string(f"{current_version.major + 1}.0.0") @classmethod def next_minor_version(cls, current_version: Version) -> Version: """ Get the next minor version from a valid version Examples: "1.2.3" will return "1.3.0" "1.2.3.dev1" will return "1.3.0" "1.2.3-alpha1" will return "1.3.0" "1.0.0" will return "1.1.0" "1.0.0-a1" will return "1.0.0" "1.0.0.dev1" will return "1.0.0" "0.5.0-a1" will return "0.5.0" "0.5.0.dev1" will return "0.5.0" """ if ( current_version.is_pre_release or current_version.is_dev_release ) and current_version.patch == 0: return cls.version_from_string( f"{current_version.major}.{current_version.minor}." 
f"{current_version.patch}" ) return cls.version_from_string( f"{current_version.major}.{current_version.minor + 1}.0" ) @classmethod def next_patch_version(cls, current_version: Version) -> Version: """ Get the next patch version from a valid version Examples: "1.2.3" will return "1.2.4" "1.2.3.dev1" will return "1.2.3" "1.2.3-dev1" will return "1.2.3" "1.2.3+dev1" will return "1.2.4" "1.2.3-alpha1" will return "1.2.3" "1.0.0" will return "1.0.1" "1.0.0-a1" will return "1.0.0" "1.0.0.dev1" will return "1.0.0" "0.5.0-a1" will return "0.5.0" "0.5.0.dev1" will return "0.5.0" """ if not current_version: raise VersionError("No current version passed.") if current_version.is_dev_release or current_version.is_pre_release: next_version = cls.version_from_string( f"{current_version.major}." f"{current_version.minor}." f"{current_version.patch}" ) else: next_version = cls.version_from_string( f"{current_version.major}." f"{current_version.minor}." f"{current_version.patch + 1}" ) return next_version @staticmethod @abstractmethod def next_dev_version(current_version: Version) -> Version: """ Get the next development version from a valid version Examples: "1.2.3" will return "1.2.4-dev1" "1.2.3.dev1" will return "1.2.3.dev2" "1.2.3-dev1" will return "1.2.3-dev2" "1.2.3+dev1" will return "1.2.4-dev1" "1.2.3-alpha1" will return "1.2.3-alpha2-dev1" "1.0.0" will return "1.0.1-dev1" "1.0.0-a1" will return "1.0.0-a2-dev1" "1.0.0.dev1" will return "1.0.0.dev2" "0.5.0-a1" will return "0.5.0-a2-dev1" "0.5.0.dev1" will return "0.5.0.dev2" """ @staticmethod @abstractmethod def next_alpha_version(current_version: Version) -> Version: """ Get the next alpha version from a valid version Examples: "1.2.3" will return "1.2.4-alpha1" "1.2.3.dev1" will return "1.2.3-alpha1" "1.2.3-dev1" will return "1.2.3-alpha1" "1.2.3+dev1" will return "1.2.4-alpha1" "1.2.3-alpha1" will return "1.2.3-alpha2" "1.0.0" will return "1.0.1-alpha1" "1.0.0-a1" will return "1.0.1-alpha1" "1.0.0.dev1" will return 
"1.0.0-alpha1" "0.5.0-a1" will return "0.5.1-alpha1" "0.5.0.dev1" will return "0.5.0-alpha1" """ @staticmethod @abstractmethod def next_beta_version(current_version: Version) -> Version: """ Get the next beta version from a valid version Examples: "1.2.3" will return "1.2.4-beta1" "1.2.3.dev1" will return "1.2.3-beta1" "1.2.3-dev1" will return "1.2.3-beta1" "1.2.3+dev1" will return "1.2.4-beta1" "1.2.3-alpha1" will return "1.2.3-beta1" "1.0.0" will return "1.0.1-beta1" "1.0.0-a1" will return "1.0.1-beta1" "1.0.0.dev1" will return "1.0.0-beta1" "0.5.0-a1" will return "0.5.1-beta1" "0.5.0.dev1" will return "0.5.0-beta1" """ @staticmethod @abstractmethod def next_release_candidate_version(current_version: Version) -> Version: """ Get the next release candidate version from a valid version Examples: "1.2.3" will return "1.2.4-rc1" "1.2.3.dev1" will return "1.2.3-rc1" "1.2.3-dev1" will return "1.2.3-rc1" "1.2.3+dev1" will return "1.2.4-rc1" "1.2.3-alpha1" will return "1.2.3-rc1" "1.0.0" will return "1.0.1-rc1" "1.0.0-a1" will return "1.0.1-rc1" "1.0.0.dev1" will return "1.0.0-rc1" "0.5.0-a1" will return "0.5.1-rc1" "0.5.0.dev1" will return "0.5.0-rc" """ pontos-25.3.2/pontos/version/_errors.py000066400000000000000000000005071476255566300202060ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from pontos.errors import PontosError class VersionError(PontosError): """ Some error has occurred during version handling """ class ProjectError(PontosError): """ An error has occured while gathering a project """ pontos-25.3.2/pontos/version/_main.py000066400000000000000000000057301476255566300176210ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import sys from enum import IntEnum, auto from typing import List, NoReturn, Optional from pontos.errors import PontosError from ._parser import parse_args from .project import Project from .schemes 
import VersioningScheme class VersionExitCode(IntEnum): SUCCESS = 0 NO_PROJECT = auto() UPDATE_ERROR = auto() CURRENT_VERSION_ERROR = auto() VERIFY_ERROR = auto() NEXT_VERSION_ERROR = auto() def main(args: Optional[List[str]] = None) -> NoReturn: parsed_args = parse_args(args) try: project = Project(parsed_args.versioning_scheme) except PontosError: print("No project found.", file=sys.stderr) sys.exit(VersionExitCode.NO_PROJECT) if parsed_args.command == "update": try: update = project.update_version( parsed_args.version, force=parsed_args.force ) except PontosError as e: print(str(e), file=sys.stderr) sys.exit(VersionExitCode.UPDATE_ERROR) if update.new == update.previous: print("Version is already up-to-date.") else: print(f"Updated version from {update.previous} to {update.new}.") elif parsed_args.command == "show": try: print(str(project.get_current_version())) except PontosError as e: print(str(e), file=sys.stderr) sys.exit(VersionExitCode.CURRENT_VERSION_ERROR) elif parsed_args.command == "verify": try: project.verify_version(parsed_args.version) except PontosError as e: print(str(e), file=sys.stderr) sys.exit(VersionExitCode.VERIFY_ERROR) elif parsed_args.command == "next": scheme: VersioningScheme = parsed_args.versioning_scheme calculator = scheme.calculator() try: current_version = project.get_current_version() except PontosError as e: print(str(e), file=sys.stderr) sys.exit(VersionExitCode.CURRENT_VERSION_ERROR) if parsed_args.type == "dev": print(calculator.next_dev_version(current_version)) elif parsed_args.type == "calendar": print(calculator.next_calendar_version(current_version)) elif parsed_args.type == "alpha": print(calculator.next_alpha_version(current_version)) elif parsed_args.type == "beta": print(calculator.next_beta_version(current_version)) elif parsed_args.type == "rc": print(calculator.next_release_candidate_version(current_version)) elif parsed_args.type == "patch": print(calculator.next_patch_version(current_version)) elif 
parsed_args.type == "minor": print(calculator.next_minor_version(current_version)) elif parsed_args.type == "major": print(calculator.next_major_version(current_version)) sys.exit(VersionExitCode.SUCCESS) pontos-25.3.2/pontos/version/_parser.py000066400000000000000000000073161476255566300201730ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023-2024 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import argparse from typing import Optional, Sequence import shtab from pontos.errors import PontosError from pontos.version.schemes import ( VERSIONING_SCHEMES, VersioningScheme, versioning_scheme_argument_type, ) def initialize_default_parser() -> argparse.ArgumentParser: """ Returns a default argument parser containing: - verify - show - update """ parser = argparse.ArgumentParser( description="Version handling utilities.", prog="version", ) shtab.add_argument_to(parser) subparsers = parser.add_subparsers( title="subcommands", description="Valid subcommands", help="Additional help", dest="command", required=True, ) verify_parser = subparsers.add_parser( "verify", help="Verify version in the current project" ) verify_parser.add_argument( "version", help="Version string to compare", nargs="?", ) verify_parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. " "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) show_parser = subparsers.add_parser( "show", help="Show version information of the current project" ) show_parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. 
" "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) update_parser = subparsers.add_parser( "update", help="Update version in the current project" ) update_parser.add_argument( "version", help="Version string to use", ) update_parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. " "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) update_parser.add_argument( "--force", help="Don't check if version is already set. " "This will override existing version information!", action="store_true", ) next_parser = subparsers.add_parser( "next", help="Calculate the next release version" ) next_parser.add_argument( "type", help="Next version type", choices=[ "dev", "calendar", "alpha", "beta", "rc", "patch", "minor", "major", ], ) next_parser.add_argument( "--versioning-scheme", help="Versioning scheme to use for parsing and handling version " f"information. Choices are {', '.join(VERSIONING_SCHEMES.keys())}. 
" "Default: %(default)s", default="pep440", type=versioning_scheme_argument_type, ) return parser def parse_args(args: Optional[Sequence[str]] = None) -> argparse.Namespace: parser = initialize_default_parser() parsed_args = parser.parse_args(args) scheme: VersioningScheme = parsed_args.versioning_scheme version = getattr(parsed_args, "version", None) if version and version != "current": try: parsed_args.version = scheme.parse_version(parsed_args.version) except PontosError as e: parser.error(str(e)) return parsed_args pontos-25.3.2/pontos/version/_version.py000066400000000000000000000121211476255566300203520ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from abc import ABC, abstractmethod from dataclasses import dataclass, field from pathlib import Path from typing import Any, Callable, Optional class Version(ABC): """ An abstract base class for version information A version implementation must consider the following constraints: * Version strings containing `-dev`, and `.dev` are considered development versions. * Version strings containing `+dev` are not considered as development versions. * Development versions are are also pre releases. 
The following version string is a development version and a pre release: `1.2.3-alpha1-dev1` * A version must return a pre for development versions for version strings containing a pre release version like `1.2.3-alpha1-dev1` * A development version has no local part * Alpha, Beta, Release Candidate and Development versions are pre releases * Alpha, Beta and Release Candidate versions pre must return the following names for the first value in the tuple: `alpha`, `beta`, `rc` and `dev` """ def __init__(self, original_version: str) -> None: self._parsed_version = original_version @property def parsed_version(self) -> str: """ Original version string from which the version has been parsed """ return self._parsed_version @property @abstractmethod def major(self) -> int: """The first item of the version or ``0`` if unavailable.""" @property @abstractmethod def minor(self) -> int: """The second item of the version or ``0`` if unavailable.""" @property @abstractmethod def patch(self) -> int: """The third item of the version or ``0`` if unavailable.""" @property @abstractmethod def pre(self) -> Optional[tuple[str, int]]: """The pre-release segment of the version.""" @property @abstractmethod def dev(self) -> Optional[int]: """The development number of the version.""" @property @abstractmethod def local(self) -> Optional[tuple[str, int]]: """The local version segment of the version.""" @property @abstractmethod def is_pre_release(self) -> bool: """ Whether this version is a pre-release (alpha, beta, release candidate). 
""" @property @abstractmethod def is_dev_release(self) -> bool: """Whether this version is a development release.""" @property @abstractmethod def is_alpha_release(self) -> bool: """Whether this version is an alpha release.""" @property @abstractmethod def is_beta_release(self) -> bool: """Whether this version is a beta release.""" @property @abstractmethod def is_release_candidate(self) -> bool: """Whether this version is a release candidate.""" @classmethod @abstractmethod def from_string(cls, version: str) -> "Version": """ Create a version from a version string Args: version: Version string to parse Raises: VersionError: If the version string is invalid. Returns: A new version instance """ @classmethod @abstractmethod def from_version(cls, version: "Version") -> "Version": """ Convert a version (if necessary) This method can be used to convert version instances from different versioning schemes. """ @abstractmethod def __eq__(self, other: Any) -> bool: pass @abstractmethod def __ne__(self, other: Any) -> bool: pass @abstractmethod def __gt__(self, other: Any) -> bool: pass @abstractmethod def __ge__(self, other: Any) -> bool: pass @abstractmethod def __lt__(self, other: Any) -> bool: pass @abstractmethod def __le__(self, other: Any) -> bool: pass @abstractmethod def __str__(self) -> str: """A string representation of the version""" def __repr__(self) -> str: """A representation of the Version""" return f"<{self.__class__.__name__}('{self}')>" ParseVersionFuncType = Callable[[str], Version] @dataclass class VersionUpdate: """ Represents a version update from a previous version to a new version. If previous and new are equal the version was not updated and changed_files should be empty. If there is no previous version for example in an initial release previous should be None. Example: .. 
code-block:: python from pathlib import Path from python.version import Version, VersionUpdate update = VersionUpdate( previous=Version("1.2.3"), new=Version("2.0.0"), changed_files=[Path("package.json"), Path("version.js")], ) """ previous: Optional[Version] new: Version changed_files: list[Path] = field(default_factory=list) @property def is_update(self) -> bool: return self.previous != self.new pontos-25.3.2/pontos/version/commands/000077500000000000000000000000001476255566300177605ustar00rootroot00000000000000pontos-25.3.2/pontos/version/commands/__init__.py000066400000000000000000000017361476255566300221000ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # from typing import Iterable, Tuple, Type from ._cargo import CargoVersionCommand from ._cmake import CMakeVersionCommand from ._command import VersionCommand from ._go import GoVersionCommand from ._java import JavaVersionCommand from ._javascript import JavaScriptVersionCommand from ._python import PythonVersionCommand __all__ = ( "VersionCommand", "CMakeVersionCommand", "GoVersionCommand", "JavaScriptVersionCommand", "JavaVersionCommand", "PythonVersionCommand", "CargoVersionCommand", "get_commands", ) __COMMANDS: Tuple[Type[VersionCommand]] = ( # type: ignore[assignment] CMakeVersionCommand, GoVersionCommand, JavaVersionCommand, JavaScriptVersionCommand, PythonVersionCommand, CargoVersionCommand, ) def get_commands() -> Iterable[Type[VersionCommand]]: """ Returns the available VersionCommands """ return __COMMANDS pontos-25.3.2/pontos/version/commands/_cargo.py000066400000000000000000000057311476255566300215720ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later from pathlib import Path from typing import Iterator, Literal, Tuple, Union import tomlkit from .._errors import VersionError from .._version import Version, VersionUpdate from ._command import VersionCommand class 
CargoVersionCommand(VersionCommand): project_file_name = "Cargo.toml" def __as_project_document( self, origin: Path ) -> Iterator[Tuple[Path, tomlkit.TOMLDocument],]: """ Parse the given origin and yields a tuple of path to a cargo toml that contains a version If the origin is invalid toml than it will raise a VersionError. """ content = origin.read_text(encoding="utf-8") content = tomlkit.parse(content) package = content.get("package") if package: version = package.get("version") if version: yield (origin, content) else: workspace = content.get("workspace") if workspace: members = workspace.get("members") for member in members: yield from self.__as_project_document( origin.parent / member / self.project_file_name ) return None def update_version( self, new_version: Version, *, force: bool = False ) -> VersionUpdate: try: previous_version = self.get_current_version() if not force and new_version == previous_version: return VersionUpdate(previous=previous_version, new=new_version) except VersionError: # just ignore current version and override it previous_version = None changed_files = [] for project_path, project in self.__as_project_document( self.project_file_path ): project["package"]["version"] = str(new_version) # type: ignore[index] # noqa: E501 project_path.write_text(tomlkit.dumps(project)) changed_files.append(project_path) return VersionUpdate( previous=previous_version, new=new_version, changed_files=changed_files, ) def get_current_version(self) -> Version: (_, document) = next(self.__as_project_document(self.project_file_path)) current_version = self.versioning_scheme.parse_version( document["package"]["version"] # type: ignore[index, arg-type] ) return self.versioning_scheme.from_version(current_version) def verify_version( self, version: Union[Literal["current"], Version, None] ) -> None: current_version = self.get_current_version() if not version or version == "current": return if current_version != version: raise VersionError( f"Provided version 
{version} does not match the " f"current version {current_version}." ) pontos-25.3.2/pontos/version/commands/_cmake.py000066400000000000000000000200461476255566300215530ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import re from typing import Iterator, Literal, Optional, Tuple, Union from .._errors import VersionError from .._version import Version, VersionUpdate from ..schemes import PEP440VersioningScheme from ._command import VersionCommand class CMakeVersionCommand(VersionCommand): project_file_name = "CMakeLists.txt" def update_version( self, new_version: Version, *, force: bool = False ) -> VersionUpdate: content = self.project_file_path.read_text(encoding="utf-8") cmake_version_parser = CMakeVersionParser(content) try: previous_version = self.get_current_version() if not force and new_version == previous_version: return VersionUpdate(previous=previous_version, new=new_version) except VersionError: # just ignore current version and override it previous_version = None new_content = cmake_version_parser.update_version(new_version) self.project_file_path.write_text(new_content, encoding="utf-8") return VersionUpdate( previous=previous_version, new=new_version, changed_files=[self.project_file_path], ) def get_current_version(self) -> Version: if not self.project_file_path.exists(): raise VersionError(f"{self.project_file_path} not found.") content = self.project_file_path.read_text(encoding="utf-8") current_version = CMakeVersionParser(content).get_current_version() return self.versioning_scheme.from_version(current_version) def verify_version( self, version: Union[Literal["current"], Version, None] ) -> None: current_version = self.get_current_version() if not version or version == "current": return if current_version != version: raise VersionError( f"Provided version {version} does not match the " f"current version {current_version}." 
) class CMakeVersionParser: def __init__(self, cmake_content_lines: str): line_no, current_version, pd_line_no, pd = self._find_version_in_cmake( cmake_content_lines ) self._cmake_content_lines = cmake_content_lines.split("\n") self._version_line_number = line_no self._current_version = current_version self._project_dev_version_line_number = pd_line_no self._project_dev_version = pd # The tokenizer is used to parse and identify specific elements in CMake scripts. # We are interested in identifying words that represent functions, variables, and their values. # Specifically, we want to scan for the words 'project', 'version', 'set', 'PROJECT_DEV_VERSION', # and their respective values, as we need to modify them. __cmake_scanner = re.Scanner( # type: ignore [ ( r"#.*", lambda _, token: ("comment", token), ), # so that we can skip ahead ( r'"[^"]*"', lambda _, token: ("string", token), ), # so that we can verify if a value is a string value ( r'"[0-9]+"', lambda _, token: ("number", token), ), # so that we can verify if a value is numeric ( r"\(", lambda _, token: ("open_bracket", token), ), # so that we can identify function calls ( r"\)", lambda _, token: ("close_bracket", token), ), # so that we can identify end of function calls ( r'[^ \t\r\n()#"]+', lambda _, token: ("word", token), ), # so that we can identify words (identifiers) ( r"\n", lambda _, token: ("newline", token), ), # so that we can keep track of the position ( r"\s+", lambda _, token: ("special_printable", token), ), # so that we can keep track of the position ] ) def get_current_version(self) -> Version: return ( PEP440VersioningScheme.parse_version( f"{self._current_version}.dev1" ) if self.is_dev_version() else PEP440VersioningScheme.parse_version(self._current_version) ) def update_version(self, new_version: Version) -> str: if new_version.is_dev_release: new_version = PEP440VersioningScheme.parse_version( f"{str(new_version.major)}." f"{str(new_version.minor)}." 
f"{str(new_version.patch)}" ) develop = True else: develop = False to_update = self._cmake_content_lines[self._version_line_number] updated = to_update.replace(self._current_version, str(new_version)) self._cmake_content_lines[self._version_line_number] = updated self._current_version = str(new_version) if self._project_dev_version_line_number: self._cmake_content_lines[self._project_dev_version_line_number] = ( self._cmake_content_lines[ self._project_dev_version_line_number ].replace(str(int(not develop)), str(int(develop))) ) self._project_dev_version = str(int(develop)) return "\n".join(self._cmake_content_lines) def _find_version_in_cmake( self, content: str ) -> Tuple[int, str, Optional[int], Optional[str]]: in_project = False in_version = False version_line_no: Optional[int] = None version: Optional[str] = None in_set = False in_project_dev_version = False project_dev_version_line_no: Optional[int] = None project_dev_version: Optional[str] = None for lineno, token_type, value in self._tokenize(content): if token_type == "word" and value == "project": in_project = True elif in_project and token_type == "word" and value == "VERSION": in_version = True elif in_version and ( token_type == "word" or token_type == "string" ): version_line_no = lineno version = value in_project = False in_version = False elif token_type == "word" and value == "set": in_set = True elif in_set and token_type == "close_bracket": in_set = False elif ( in_set and token_type == "word" and value == "PROJECT_DEV_VERSION" ): in_project_dev_version = True elif in_project_dev_version and ( token_type == "word" or token_type == "number" ): project_dev_version_line_no = lineno project_dev_version = value in_project_dev_version = False elif in_project and token_type == "close_bracket": raise ValueError("unable to find cmake version in project.") if not version or version_line_no is None: raise ValueError("unable to find cmake version.") return ( version_line_no, version, 
project_dev_version_line_no, project_dev_version, ) def is_dev_version(self) -> bool: return ( int(self._project_dev_version) == 1 if self._project_dev_version else False ) def _tokenize( # type: ignore self, content: str ) -> Iterator[Tuple[int, str, str]]: toks, remainder = self.__cmake_scanner.scan(content) if remainder != "": print(f"WARNING: unrecognized cmake tokens: {remainder}") line_num = 0 for tok_type, tok_contents in toks: line_num += tok_contents.count("\n") yield line_num, tok_type, tok_contents.strip() pontos-25.3.2/pontos/version/commands/_command.py000066400000000000000000000036551476255566300221200ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH # # SPDX-License-Identifier: GPL-3.0-or-later # from abc import ABC, abstractmethod from pathlib import Path from typing import Literal, Union from .._version import Version, VersionUpdate from ..schemes import VersioningScheme class VersionCommand(ABC): """Generic class usable to implement the version commands for several programming languages""" project_file_name: str def __init__( self, versioning_scheme: Union[VersioningScheme, type[VersioningScheme]] ) -> None: self.project_file_path = Path.cwd() / self.project_file_name self.versioning_scheme = versioning_scheme @abstractmethod def get_current_version(self) -> Version: """Get the current version of this project""" @abstractmethod def verify_version( self, version: Union[Literal["current"], Version, None] ) -> None: """ Verify the current version of this project Args: version: Version to check against the current applied version of this project. If version is None or "current" the command should verify if all version information is consistent, for example if the version information in several files is the same. 
""" @abstractmethod def update_version( self, new_version: Version, *, force: bool = False ) -> VersionUpdate: """ Update the current version of this project Args: new_version: Use this version in the update force: Force updating the version even if the current version is the same as the new version Returns: The version update including the changed files """ def project_found(self) -> bool: """ Returns True if a command has detected a corresponding project """ return self.project_file_path.exists() pontos-25.3.2/pontos/version/commands/_go.py000066400000000000000000000061721476255566300211040ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import re from pathlib import Path from typing import Literal, Union from .._errors import VersionError from .._version import Version, VersionUpdate from ._command import VersionCommand VERSION_MATCH = r'var [Vv]ersion = "(.+)"' TEMPLATE = """package main // THIS IS AN AUTOGENERATED FILE. DO NOT TOUCH! 
var version = "{}" \n""" # This class is used for Go Version command(s) class GoVersionCommand(VersionCommand): project_file_name = "go.mod" version_file_path = Path("version.go") def _update_version_file(self, new_version: Version) -> None: """ Update the version file with the new version """ if self.version_file_path.exists(): version = self.get_current_version() template = self.version_file_path.read_text( encoding="utf-8" ).replace(str(version), str(new_version)) else: template = TEMPLATE.format(str(new_version)) self.version_file_path.write_text(template, encoding="utf-8") def get_current_version(self) -> Version: """Get the current version of this project In go the version is only defined within the repository tags, thus we need to check git, what tag is the latest""" if self.version_file_path.exists(): version_file_text = self.version_file_path.read_text( encoding="utf-8" ) match = re.search(VERSION_MATCH, version_file_text) if match: return self.versioning_scheme.parse_version(match.group(1)) else: raise VersionError( f"No version found in the {self.version_file_path} file." ) else: raise VersionError( f"No {self.version_file_path} file found. " "This file is required for pontos" ) def verify_version( self, version: Union[Literal["current"], Version, None] ) -> None: """Verify the current version of this project""" current_version = self.get_current_version() if not version or version == "current": return if current_version != version: raise VersionError( f"Provided version {version} does not match the " f"current version {current_version}." 
) def update_version( self, new_version: Version, *, force: bool = False ) -> VersionUpdate: """Update the current version of this project""" try: current_version = self.get_current_version() if not force and new_version == current_version: return VersionUpdate(previous=current_version, new=new_version) except VersionError: # just ignore current version and override it current_version = None self._update_version_file(new_version=new_version) return VersionUpdate( previous=current_version, new=new_version, changed_files=[self.version_file_path], ) pontos-25.3.2/pontos/version/commands/_java.py000066400000000000000000000134421476255566300214160ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG # # SPDX-License-Identifier: GPL-3.0-or-later # import json import re from pathlib import Path from typing import Any, Dict, List, Literal, Union from .._errors import VersionError from .._version import Version, VersionUpdate from ._command import VersionCommand # This class is used for Java Version command(s) class JavaVersionCommand(VersionCommand): VERSION_PATTERN = ( r"^(?P
.*[^\d])?"
        r"("
        r"?P\d+\.\d+\.\d+"
        r"([-\.]+(dev|rc|beta|a|alpha|b)\d+)*"
        r")"
        r"(?P.*$)"
    )

    project_file_name = "upgradeVersion.json"

    def get_current_version(self) -> Version:
        """Return the version shared by all files listed in the config.

        Raises:
            VersionError: If no version could be found or the configured
                files carry different versions.
        """
        versions_by_file = self._read_versions_from_files()
        consistent_version = self._verify_version(versions_by_file)

        if not consistent_version:
            raise VersionError("no version found")

        return self.versioning_scheme.parse_version(consistent_version)

    def verify_version(
        self, version: Union[Literal["current"], Version, None]
    ) -> None:
        file_versions = self._read_versions_from_files()

        last_version = self._verify_version(file_versions)

        if last_version != str(version):
            raise VersionError(
                f"Provided version {version} does not match the "
                f"current version {last_version} "
                f"in '{self.project_file_path}'"
            )

    def update_version(
        self, new_version: Version, *, force: bool = False
    ) -> VersionUpdate:
        """Write *new_version* into every configured file.

        Without *force* the update is a no-op (no files touched) when the
        project already carries *new_version*.
        """
        try:
            previous = self.get_current_version()
        except VersionError:
            # no (consistent) current version -- just overwrite it
            previous = None
        else:
            if not force and previous == new_version:
                return VersionUpdate(previous=previous, new=new_version)

        return VersionUpdate(
            previous=previous,
            new=new_version,
            changed_files=self._update_version_files(new_version),
        )

    def parse_line(self, version_line: str):
        """Split a line into its pre-version, version and post-version parts.

        Returns:
            An ``re.Match`` with the named groups ``pre``, ``version`` and
            ``post`` (all three are read by the callers), or ``None`` when
            the line contains no semantic version.
        """
        # NOTE(review): the class attribute VERSION_PATTERN above lost its
        # named group names -- "(?P" must be followed by "<name>", so the
        # attribute is an invalid regex, while the callers rely on
        # group("pre"), group("version") and group("post"). Use the
        # corrected pattern here.
        version_pattern = (
            r"^(?P<pre>.*[^\d])?"
            r"(?P<version>\d+\.\d+\.\d+"
            r"([-\.]+(dev|rc|beta|a|alpha|b)\d+)*"
            r")"
            r"(?P<post>.*$)"
        )
        return re.match(version_pattern, version_line, re.DOTALL)

    def _update_version_files(self, new_version) -> List[Path]:
        config = self._load_config()

        changed_files: List[Path] = []
        for file_config in config["files"]:
            file_path = file_config["path"]
            with (Path.cwd() / file_path).open("r") as input_file_handle:
                lines = input_file_handle.readlines()
                line_number = file_config["line"]
                version_line = lines[line_number - 1]

                matches = self.parse_line(version_line)
                if matches is None:
                    raise VersionError(
                        f"Line has no version, "
                        f"file:'{file_path}' "
                        f"lineNo:{line_number} "
                        f"content:'{version_line}'"
                    )
                lines[line_number - 1] = (
                    matches.group("pre")
                    + str(new_version)
                    + matches.group("post")
                )

                content = "".join(lines)
                with (Path.cwd() / file_path).open("w") as output_file_handle:
                    output_file_handle.write(content)
                changed_files.append(Path(file_config["path"]))
        return changed_files

    def _load_config(self) -> Dict[str, Any]:
        version_config_file = Path.cwd() / "upgradeVersion.json"
        if not version_config_file.exists():
            raise VersionError(
                f"No {version_config_file} config file found. "
                "This file is required for pontos"
            )

        with version_config_file.open("r") as f:
            json_string = f.read()
            config = json.loads(json_string)
            return config

    def _read_versions_from_files(self) -> Dict[str, str]:
        config = self._load_config()

        file_versions = {}
        for file_config in config["files"]:
            file_path = file_config["path"]
            file = Path.cwd() / file_path
            if not file.exists():
                raise VersionError(f"No {file} file found.")

            with file.open("r") as f:
                line_number = file_config["line"]
                readlines = f.readlines()
                if line_number - 1 > len(readlines):
                    raise VersionError(
                        f"Line number:{line_number} "
                        f"is beyond file lines:{len(readlines) + 1} "
                        f"file:'{file_path}'"
                    )
                version_line = readlines[line_number - 1]
                matches = self.parse_line(version_line)
                if matches is None:
                    raise VersionError(
                        f"Line has no version, "
                        f"file:'{file_path}' "
                        f"lineNo:{line_number} "
                        f"content:'{version_line}'"
                    )
                file_versions[file_path] = matches.group("version")
        return file_versions

    def _verify_version(self, file_versions: Dict[str, str]) -> str:
        last_version = ""
        last_file_name = ""
        for file_name, version in file_versions.items():
            if last_version == "":
                last_version = version
                last_file_name = file_name
                continue

            if last_version != version:
                raise VersionError(
                    f"Versions are not the same "
                    f"last_file_name:'{last_file_name}' "
                    f"last_version:'{last_version}' "
                    f"file_name:'{file_name}' "
                    f"version:'{version}'"
                )
        return last_version
pontos-25.3.2/pontos/version/commands/_javascript.py000066400000000000000000000134021476255566300226370ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import json
import re
from pathlib import Path
from typing import Any, Dict, Literal, Optional, Union

from .._errors import VersionError
from .._version import Version, VersionUpdate
from ._command import VersionCommand


# Version command implementation for JavaScript projects.
class JavaScriptVersionCommand(VersionCommand):
    """
    Version command for JavaScript projects.

    The authoritative version is the "version" field in package.json.
    Additional copies may live in src/version.js or src/version.ts and are
    kept in sync on updates.
    """

    project_file_name = "package.json"
    version_file_paths = (Path("src", "version.js"), Path("src", "version.ts"))
    # cached parsed content of package.json
    _package = None

    @property
    def package(self) -> Dict[str, Any]:
        """
        The parsed content of package.json (cached after the first access).

        Raises:
            VersionError: If the file doesn't exist, can't be read, contains
                invalid JSON or is missing the "version" field.
        """
        if self._package:
            return self._package

        if not self.project_file_path.exists():
            raise VersionError(f"{self.project_file_path} file not found.")

        try:
            with self.project_file_path.open(mode="r", encoding="utf-8") as fp:
                self._package = json.load(fp)
        except OSError as e:
            raise VersionError(
                "No version tag found. Maybe this "
                "module has not been released at all."
            ) from e
        except json.JSONDecodeError as e:
            raise VersionError(
                "No valid JSON found. Maybe this "
                "module has not been released at all."
            ) from e

        if not self._package.get("version", None):
            raise VersionError(
                f"Version field missing in {self.project_file_path}."
            )

        return self._package

    def _get_current_file_version(
        self, version_file: Path
    ) -> Optional[Version]:
        """
        Read the version from a version.js/version.ts file.

        Returns:
            The parsed version, or None if the file doesn't exist.

        Raises:
            VersionError: If the file exists but contains no VERSION variable.
        """
        if not version_file.exists():
            return None

        content = version_file.read_text(encoding="utf-8")
        # the named group is required: match.group("version") below relies
        # on it ("(?P.*)" without the group name is not even a valid regex)
        match = re.search(r'VERSION = "(?P<version>.*)";', content)
        if not match:
            raise VersionError(f"VERSION variable not found in {version_file}")

        return self.versioning_scheme.parse_version(match.group("version"))

    def get_current_version(self) -> Version:
        """Get the current version of this project from the "version"
        field of the package.json file"""
        return self.versioning_scheme.parse_version(self.package["version"])

    def verify_version(
        self, version: Union[Literal["current"], Version, None]
    ) -> None:
        """Verify the current version of this project.

        If version is None or "current" only the consistency between
        package.json and the version files is checked. Otherwise the given
        version must also match all stored versions.
        """
        current_version = self.get_current_version()

        if not version or version == "current":
            for version_file in self.version_file_paths:
                file_version = self._get_current_file_version(version_file)
                if file_version and file_version != current_version:
                    raise VersionError(
                        f"The version {file_version} in "
                        f"{version_file} doesn't match the current "
                        f"version {current_version}."
                    )
            return

        if current_version != version:
            raise VersionError(
                f"Provided version {version} does not match the "
                f"current version {current_version} in "
                f"{self.project_file_path}."
            )

        for version_file in self.version_file_paths:
            file_version = self._get_current_file_version(version_file)
            if file_version and file_version != version:
                raise VersionError(
                    f"Provided version {version} does not match the "
                    f"current version {file_version} in {version_file}."
                )

    def _update_package_json(self, new_version: Version) -> None:
        """
        Update the version in the package.json file
        """
        try:
            self.package["version"] = str(new_version)

            with self.project_file_path.open(mode="w") as fp:
                json.dump(obj=self.package, fp=fp, indent=2)

        except EnvironmentError as e:
            raise VersionError(
                "No version tag found. Maybe this "
                "module has not been released at all."
            ) from e
        except json.JSONDecodeError as e:
            raise VersionError("Couldn't load JSON") from e

    def _update_version_file(
        self, version_file: Path, new_version: Version
    ) -> bool:
        """
        Update the version file with the new version

        Returns:
            True if the file existed and its VERSION variable was replaced,
            False otherwise.
        """
        if not version_file.exists():
            return False

        content = version_file.read_text(encoding="utf-8")
        # (?P=quote) backreferences the (?P<quote>...) group so both single-
        # and double-quoted VERSION assignments are handled
        match = re.search(
            pattern=r'VERSION = (?P<quote>[\'"])(?P<version>.*)(?P=quote)',
            string=content,
        )
        if not match:
            return False

        content = content.replace(match.group("version"), str(new_version))
        version_file.write_text(content, encoding="utf-8")
        return True

    def update_version(
        self, new_version: Version, *, force: bool = False
    ) -> VersionUpdate:
        """
        Update the version in package.json and all existing version files.

        Args:
            new_version: Version to apply
            force: Update even if the current version equals new_version

        Returns:
            The VersionUpdate including all changed files
        """
        try:
            package_version = self.get_current_version()
            if not force and new_version == package_version:
                # nothing to do: already at the requested version
                return VersionUpdate(previous=package_version, new=new_version)
        except VersionError:
            # just ignore current version and override it
            package_version = None

        changed_files = [self.project_file_path]
        self._update_package_json(new_version=new_version)

        for version_file in self.version_file_paths:
            updated = self._update_version_file(
                version_file, new_version=new_version
            )
            if updated:
                changed_files.append(version_file)

        return VersionUpdate(
            previous=package_version,
            new=new_version,
            changed_files=changed_files,
        )
pontos-25.3.2/pontos/version/commands/_python.py000066400000000000000000000176021476255566300220200ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import importlib.util
from pathlib import Path
from typing import Literal, Union

import tomlkit

from .._errors import VersionError
from .._version import Version, VersionUpdate
from ..schemes import PEP440VersioningScheme
from ._command import VersionCommand

# Content written to the version module file; the "{}" placeholder is
# replaced with the new version string (see _update_version_file below).
TEMPLATE = """# pylint: disable=invalid-name

# THIS IS AN AUTOGENERATED FILE. DO NOT TOUCH!

__version__ = "{}"\n"""


# Version command implementation for Python projects using pyproject.toml.
class PythonVersionCommand(VersionCommand):
    """Version command for Python projects.

    The version lives in the [tool.poetry] section of pyproject.toml and in
    a generated version module whose path is configured via the
    [tool.pontos.version] version-module-file key.
    """

    project_file_name = "pyproject.toml"
    # lazily resolved path of the version module file
    _version_file_path = None
    # lazily parsed pyproject.toml document
    _pyproject_toml = None

    def _get_version_from_pyproject_toml(self) -> Version:
        """
        Return the version information from the [tool.poetry] section of the
        pyproject.toml file. The version may be in non standardized form.
        """

        if (
            "tool" in self.pyproject_toml
            and "poetry" in self.pyproject_toml["tool"]  # type: ignore[operator] # noqa: E501
            and "version" in self.pyproject_toml["tool"]["poetry"]  # type: ignore[operator,index] # noqa: E501
        ):
            return PEP440VersioningScheme.parse_version(
                str(self.pyproject_toml["tool"]["poetry"]["version"])  # type: ignore[index] # noqa: E501
            )

        raise VersionError(
            "Version information not found in "
            f"{self.project_file_path} file."
        )

    def _update_version_file(self, new_version: Version) -> None:
        """
        Update the version file with the new version

        Overwrites the whole file with the autogenerated TEMPLATE content.
        """
        self.version_file_path.write_text(
            TEMPLATE.format(str(new_version)), encoding="utf-8"
        )

    def _update_pyproject_version(
        self,
        new_version: Version,
    ) -> None:
        """
        Update the version in the pyproject.toml file

        Creates the [tool] and [tool.poetry] tables if they don't exist yet.
        """
        pyproject_toml = tomlkit.parse(
            self.project_file_path.read_text(encoding="utf-8")
        )

        if "tool" not in pyproject_toml:
            tool_table = tomlkit.table()
            pyproject_toml["tool"] = tool_table

        if "poetry" not in pyproject_toml["tool"]:  # type: ignore
            poetry_table = tomlkit.table()
            # pylint: disable=line-too-long, no-member # ignore pylint (2.13.9) error: pontos/version/python.py:128:12: E1101: Instance of 'OutOfOrderTableProxy' has no 'add' member (no-member) # noqa: E501
            pyproject_toml["tool"].add("poetry", poetry_table)  # type: ignore

        pyproject_toml["tool"]["poetry"]["version"] = str(new_version)  # type: ignore # pylint: disable=line-too-long # noqa: E501

        self.project_file_path.write_text(
            tomlkit.dumps(pyproject_toml), encoding="utf-8"
        )

    @property
    def pyproject_toml(self) -> tomlkit.TOMLDocument:
        """The parsed pyproject.toml document (cached after first access).

        Raises:
            VersionError: If the pyproject.toml file doesn't exist.
        """
        if self._pyproject_toml:
            return self._pyproject_toml

        if not self.project_file_path.exists():
            raise VersionError("pyproject.toml file not found.")

        self._pyproject_toml = tomlkit.parse(
            self.project_file_path.read_text(encoding="utf-8")
        )

        return self._pyproject_toml

    @property
    def version_file_path(self) -> Path:
        """Path of the version module file as configured in the
        [tool.pontos.version] section of pyproject.toml.

        Raises:
            VersionError: If the section or the version-module-file key is
                missing.
        """
        if self._version_file_path:
            return self._version_file_path

        if (
            "tool" not in self.pyproject_toml
            or "pontos" not in self.pyproject_toml["tool"]  # type: ignore
            or "version" not in self.pyproject_toml["tool"]["pontos"]  # type: ignore # pylint: disable=line-too-long # noqa: E501
        ):
            raise VersionError(
                "[tool.pontos.version] section missing "
                f"in {self.project_file_path}."
            )

        pontos_version_settings = self.pyproject_toml["tool"]["pontos"][  # type: ignore # pylint: disable=line-too-long # noqa: E501
            "version"
        ]

        try:
            self._version_file_path = Path(
                pontos_version_settings["version-module-file"]  # type: ignore
            )
            return self._version_file_path
        except tomlkit.exceptions.NonExistentKey:
            raise VersionError(
                "version-module-file key not set in [tool.pontos.version] "
                f"section of {str(self.project_file_path)}."
            ) from None

    def get_current_version(self) -> Version:
        """Get the current version by importing the version module file and
        reading its __version__ attribute.

        Raises:
            VersionError: If the version module can't be located or imported.
        """
        version_module_name = self.version_file_path.stem
        module_parts = list(self.version_file_path.parts[:-1]) + [
            version_module_name
        ]
        module_name = ".".join(module_parts)
        try:
            # import the version module dynamically from its file location
            spec = importlib.util.spec_from_file_location(
                module_name, self.version_file_path
            )
            if not spec:
                raise VersionError(
                    f"Could not load version from '{module_name}'. "
                )

            version_module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(version_module)  # type: ignore[union-attr]
            return PEP440VersioningScheme.parse_version(
                version_module.__version__
            )
        except FileNotFoundError:
            raise VersionError(
                f"Could not load version from '{module_name}'. "
                f"{self.version_file_path} not found."
            ) from None
        except ModuleNotFoundError:
            raise VersionError(
                f"Could not load version from '{module_name}'. Import failed."
            ) from None

    def verify_version(
        self, version: Union[Literal["current"], Version, None]
    ) -> None:
        """Verify that the version module and pyproject.toml agree and, if a
        concrete version is given, that it matches the current version.

        Raises:
            VersionError: If the version information is inconsistent.
        """
        current_version = self.get_current_version()
        pyproject_version = self._get_version_from_pyproject_toml()

        if pyproject_version != current_version:
            raise VersionError(
                f"The version {pyproject_version} in "
                f"{str(self.project_file_path)} doesn't match the current "
                f"version {current_version}."
            )

        if version and version != "current":
            if version != current_version:
                raise VersionError(
                    f"Provided version {version} does not match the "
                    f"current version {current_version}."
                )

    def update_version(
        self, new_version: Version, *, force: bool = False
    ) -> VersionUpdate:
        """Update the version in pyproject.toml and the version module file.

        Args:
            new_version: Version to apply
            force: Update even if the current version equals new_version

        Returns:
            The VersionUpdate including the changed files
        """
        # pyproject.toml always stores a PEP 440 version
        new_pep440_version = PEP440VersioningScheme.from_version(new_version)

        try:
            try:
                current_version = self.get_current_version()
            except VersionError:
                # maybe no version module exists yet. fallback to version from
                # pyproject.toml
                current_version = self._get_version_from_pyproject_toml()

            current_converted_version = self.versioning_scheme.from_version(
                current_version
            )

            if not force and new_pep440_version == current_version:
                return VersionUpdate(
                    previous=current_converted_version, new=new_version
                )
        except VersionError:
            # just ignore current version and override it
            current_converted_version = None

        try:
            self._update_pyproject_version(new_version=new_pep440_version)
        except OSError as e:
            raise VersionError(
                "Unable to update version in "
                f"{self.project_file_path.absolute()}. Error was {e}"
            ) from e

        try:
            self._update_version_file(new_version=new_pep440_version)
        except OSError as e:
            raise VersionError(
                "Unable to update version in "
                f"{self.version_file_path.absolute()}. Error was {e}"
            ) from e

        return VersionUpdate(
            previous=current_converted_version,
            new=new_version,
            changed_files=[self.version_file_path, self.project_file_path],
        )
pontos-25.3.2/pontos/version/helper.py000066400000000000000000000046261476255566300200200ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

from typing import Iterator, Optional

from pontos.git import DEFAULT_TAG_SORT_SUFFIX, Git, TagSort

from ._errors import VersionError
from ._version import ParseVersionFuncType, Version


def get_last_release_versions(
    parse_version: ParseVersionFuncType,
    *,
    git: Optional[Git] = None,
    git_tag_prefix: Optional[str] = "",
    ignore_pre_releases: Optional[bool] = False,
    tag_name: Optional[str] = None,
) -> Iterator[Version]:
    """Get the last released Versions from git.

    Args:
        parse_version: Function to parse a version string into a Version
        git: Git instance to use
        git_tag_prefix: Git tag prefix to consider
        ignore_pre_releases: Ignore pre releases and only consider non pre
            releases. Default is False.
        tag_name: A pattern for filtering the tags. For example: "1.2.*"

    Returns:
        Iterator over the released versions, newest first
    """
    git = git or Git()
    git_tag_prefix = git_tag_prefix or ""
    tag_list = git.list_tags(
        sort=TagSort.VERSION,
        sort_suffix=DEFAULT_TAG_SORT_SUFFIX,
        tag_name=tag_name,
    )
    tag_list.reverse()

    for tag in tag_list:
        # use removeprefix instead of str.strip: strip(prefix) removes any
        # of the prefix *characters* from both ends of the tag and can
        # mangle version strings that start or end with such a character
        last_release_version = tag.removeprefix(git_tag_prefix)

        try:
            version = parse_version(last_release_version)
        except VersionError:
            # be safe and ignore invalid versions
            continue

        if version.is_pre_release and ignore_pre_releases:
            continue

        yield version


def get_last_release_version(
    parse_version: ParseVersionFuncType,
    *,
    git: Optional[Git] = None,
    git_tag_prefix: Optional[str] = "",
    ignore_pre_releases: Optional[bool] = False,
    tag_name: Optional[str] = None,
) -> Optional[Version]:
    """Get the last released Version from git.

    Args:
        parse_version: Function to parse a version string into a Version
        git: Git instance to use
        git_tag_prefix: Git tag prefix to consider
        ignore_pre_releases: Ignore pre releases and only consider non pre
            releases. Default is False.
        tag_name: A pattern for filtering the tags. For example: "1.2.*"

    Returns:
        Last released git tag as Version if tags were found
        or None
    """
    # the newest valid version is the first item of the iterator; None if
    # the iterator is exhausted immediately (no matching tags)
    versions = get_last_release_versions(
        parse_version=parse_version,
        git=git,
        git_tag_prefix=git_tag_prefix,
        ignore_pre_releases=ignore_pre_releases,
        tag_name=tag_name,
    )
    return next(versions, None)
pontos-25.3.2/pontos/version/project.py000066400000000000000000000061411476255566300202010ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#


from typing import List, Literal, Type, Union

from ._errors import ProjectError
from ._version import Version, VersionUpdate
from .commands import VersionCommand, get_commands
from .schemes import VersioningScheme

__all__ = ("Project",)


class Project:
    """
    A project for handling versioning

    Detects every version command that applies to the current working
    directory and applies version operations to all of them.

    Example:
        .. code-block:: python

            from pontos.version.scheme import PEP440VersioningScheme
            from pontos.version.project import Project

            project = Project(PEP440VersioningScheme)
    """

    def __init__(
        self, versioning_scheme: Union[VersioningScheme, Type[VersioningScheme]]
    ) -> None:
        """
        Creates a new project instance

        Args:
            versioning_scheme: Scheme for version handling

        Raises:
            ProjectError: If no fitting VersionCommand could be found
        """
        self._versioning_scheme = versioning_scheme
        self._commands = self._gather_commands()

    def _gather_commands(self) -> List[VersionCommand]:
        """
        Collect all version commands whose project settings file exists in
        the current working directory

        Raises:
            ProjectError: If no fitting VersionCommand could be found
        """
        detected = [
            candidate
            for candidate in (
                command_cls(versioning_scheme=self._versioning_scheme)
                for command_cls in get_commands()
            )
            if candidate.project_found()
        ]

        if not detected:
            raise ProjectError("No project settings file found")

        return detected

    def update_version(
        self, new_version: Version, *, force: bool = False
    ) -> VersionUpdate:
        """
        Update the current version of this project

        Args:
            new_version: Use this version in the update
            force: Force updating the version even if the current version is the
                same as the new version

        Returns:
            The version update including the changed files
        """
        primary, *others = self._commands
        update = primary.update_version(new_version, force=force)
        # merge the changed files of the remaining commands into the first
        # command's update result
        for command in others:
            additional = command.update_version(new_version, force=force)
            update.changed_files.extend(additional.changed_files)

        return update

    def get_current_version(self) -> Version:
        """
        Get the current version of the project

        Returns:
            The current version
        """
        return self._commands[0].get_current_version()

    def verify_version(
        self, version: Union[Literal["current"], Version]
    ) -> None:
        """
        Verify the current version of this project

        Args:
            version: Version to check against the current applied version of
                this project. If version is "current" the command should verify
                if all version information is consistent, for example if the
                version information in several files is the same.
        """
        for command in self._commands:
            command.verify_version(version)
pontos-25.3.2/pontos/version/schemes/000077500000000000000000000000001476255566300176065ustar00rootroot00000000000000pontos-25.3.2/pontos/version/schemes/__init__.py000066400000000000000000000030421476255566300217160ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

from argparse import ArgumentTypeError

from ._pep440 import PEP440VersioningScheme
from ._scheme import VersioningScheme
from ._semantic import SemanticVersioningScheme

# Public names exported by the schemes package
__all__ = (
    "VERSIONING_SCHEMES",
    "versioning_scheme_argument_type",
    "VersioningScheme",
    "PEP440VersioningScheme",
    "SemanticVersioningScheme",
)

#: Dictionary with available versioning schemes, keyed by their CLI name
VERSIONING_SCHEMES: dict[str, type[VersioningScheme]] = {
    "pep440": PEP440VersioningScheme,
    "semver": SemanticVersioningScheme,
}


def versioning_scheme_argument_type(value: str) -> type[VersioningScheme]:
    """
    Verifies if the passed value is a valid versioning scheme and returns
    the corresponding versioning scheme.

    Intended to be used as in `ArgumentParser.add_argument` as the type.

    Raises:
        ArgumentTypeError: If the passed value is not a valid versioning scheme

    Example:
        .. code-block:: python

            from argparse import ArgumentParser
            from pontos.version.schemes import (
                versioning_scheme_argument_type,
            )

            parser = ArgumentParser()
            parser.add_argument(
                "--versioning-scheme",
                type=versioning_scheme_argument_type,
            )
    """
    try:
        return VERSIONING_SCHEMES[value]
    except KeyError:
        # argparse turns ArgumentTypeError into a user-facing CLI error
        raise ArgumentTypeError(
            f"invalid value {value}. Expected one of "
            f"{', '.join(VERSIONING_SCHEMES.keys())}."
        ) from None
pontos-25.3.2/pontos/version/schemes/_pep440.py000066400000000000000000000341271476255566300213420ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import re
from typing import Any, Optional, Tuple

from packaging.version import InvalidVersion
from packaging.version import Version as PackagingVersion

from .._calculator import VersionCalculator
from .._errors import VersionError
from .._version import Version
from ._scheme import VersioningScheme

__all__ = (
    "PEP440Version",
    "PEP440VersionCalculator",
    "PEP440VersioningScheme",
)

_LOCAL_RELEASE_REGEXP = re.compile(
    r"^(?P[a-zA-Z]+)(?P0|[1-9][0-9]*)$"
)
_PRE_RELEASE_NAME = {"a": "alpha", "b": "beta"}


def _pre_release_name(name: str) -> str:
    return _PRE_RELEASE_NAME.get(name, name)


class PEP440Version(Version):
    """
    A class handling PEP 440 based version information

    Wraps packaging.version.Version and additionally parses a
    (name, number) tuple from the local version segment.
    """

    def __init__(self, version: str) -> None:
        super().__init__(version)
        self._version = PackagingVersion(version)
        self._parse_local()

    def _parse_local(self):
        # Extract a ("name", number) tuple from the local version segment
        # (e.g. "foo1" -> ("foo", 1)). A local segment that doesn't match
        # the name-plus-number form is ignored and self._local stays None.
        self._local = None
        if self._version.local:
            match = _LOCAL_RELEASE_REGEXP.match(self._version.local)
            if match:
                self._local = (
                    match.group("name"),
                    int(match.group("version")),
                )

    @property
    def major(self) -> int:
        """The first item of the version"""
        return self._version.major

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable."""
        return self._version.minor

    @property
    def patch(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable."""
        return self._version.micro

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version."""
        if not self._version.pre:
            return None

        # translate the short identifier ("a"/"b") into its long form
        return (_pre_release_name(self._version.pre[0]), self._version.pre[1])

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version."""
        return self._version.dev

    @property
    def local(self) -> Optional[Tuple[str, int]]:
        """The local version segment of the version."""
        return self._local

    @property
    def is_pre_release(self) -> bool:
        """
        Whether this version is a pre-release (alpha, beta, release candidate
        and development).
        """
        return self._version.is_prerelease

    @property
    def is_dev_release(self) -> bool:
        """Whether this version is a development release."""
        return self._version.is_devrelease

    @property
    def is_alpha_release(self) -> bool:
        """Whether this version is an alpha release."""
        return bool(self.is_pre_release and self.pre and self.pre[0] == "alpha")

    @property
    def is_beta_release(self) -> bool:
        """Whether this version is a beta release."""
        return bool(self.is_pre_release and self.pre and self.pre[0] == "beta")

    @property
    def is_release_candidate(self) -> bool:
        """Whether this version is a release candidate."""
        return bool(self.is_pre_release and self.pre and self.pre[0] == "rc")

    @classmethod
    def from_string(cls, version: str) -> "PEP440Version":
        """
        Create a version from a version string

        Args:
            version: Version string to parse

        Raises:
            VersionError: If the version string is invalid.

        Returns:
            A new version instance
        """
        try:
            return cls(version)
        except InvalidVersion as e:
            raise VersionError(e) from None

    @classmethod
    def from_version(cls, version: "Version") -> "PEP440Version":
        """
        Convert a version (if necessary)

        This method can be used to convert version instances from different
        versioning schemes.
        """

        if isinstance(version, cls):
            return version

        try:
            # try to parse the original version string
            return cls.from_string(version.parsed_version)
        except VersionError:
            pass

        # reconstruct an equivalent PEP 440 version string from the parts
        # of the foreign version
        version_local = (
            f"+{version.local[0]}{version.local[1]}" if version.local else ""
        )
        if version.is_dev_release:
            if not version.pre:
                new_version = cls.from_string(
                    f"{version.major}."
                    f"{version.minor}."
                    f"{version.patch}"
                    f".dev{version.dev}"
                    f"{version_local}"
                )
            else:
                new_version = cls.from_string(
                    f"{version.major}."
                    f"{version.minor}."
                    f"{version.patch}"
                    f"-{version.pre[0]}{version.pre[1]}"
                    f".dev{version.dev}"
                )
        elif version.is_pre_release:
            new_version = cls.from_string(
                f"{version.major}."
                f"{version.minor}."
                f"{version.patch}"
                f"-{version.pre[0]}{version.pre[1]}"  # type: ignore[index]
                f"{version_local}"
            )
        else:
            new_version = cls.from_string(str(version))

        # keep the original string form around for round-tripping
        new_version._parsed_version = version.parsed_version
        return new_version

    # Comparisons delegate to the wrapped packaging version; versions from
    # other schemes are converted first.
    # NOTE(review): __eq__ is defined without __hash__, which makes
    # instances unhashable — confirm this is intended.
    def __eq__(self, other: Any) -> bool:
        if other is None:
            return False
        if isinstance(other, str):
            # allow to compare against "current" for now
            return False
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return self._version == other._version

    def __ne__(self, other: Any) -> bool:
        if other is None:
            return True
        if isinstance(other, str):
            # allow to compare against "current" for now
            return True
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return self._version != other._version

    def __gt__(self, other: Any) -> bool:
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return self._version > other._version

    def __ge__(self, other: Any) -> bool:
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return self._version >= other._version

    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return self._version < other._version

    def __le__(self, other: Any) -> bool:
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return self._version <= other._version

    def __str__(self) -> str:
        return str(self._version)


class PEP440VersionCalculator(VersionCalculator):
    """
    A PEP 440 version calculator

    Calculates follow-up development, alpha, beta and release candidate
    versions for a given PEP 440 version.
    """

    # Fix: the docstring above used to be placed *after* this attribute,
    # which made it a no-op string expression instead of the class docstring.
    version_cls = PEP440Version

    @classmethod
    def next_dev_version(cls, current_version: Version) -> Version:
        """
        Get the next development version from a valid version

        Examples:
            "1.2.3" will return "1.2.4.dev1"
            "1.2.3.dev1" will return "1.2.3.dev2"
        """
        if current_version.is_dev_release:
            # keep an existing pre-release part and just bump the dev counter
            if current_version.pre:
                return cls.version_from_string(
                    f"{current_version.major}."  # type: ignore[operator]
                    f"{current_version.minor}."
                    f"{current_version.patch}"
                    f"-{current_version.pre[0]}{current_version.pre[1]}"  # type: ignore[index] # noqa: E501
                    f".dev{current_version.dev + 1}"  # type: ignore[operator]
                )
            return cls.version_from_string(
                f"{current_version.major}."  # type: ignore[operator]
                f"{current_version.minor}."
                f"{current_version.patch}"
                f".dev{current_version.dev + 1}"  # type: ignore[operator]
            )

        if current_version.is_pre_release:
            # bump the pre-release counter and restart the dev counter
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}"
                f"{current_version.pre[0]}{current_version.pre[1] + 1}.dev1"  # type: ignore[index] # noqa: E501
            )

        # final release: development starts for the next patch release
        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}.dev1"
        )

    @classmethod
    def next_alpha_version(cls, current_version: Version) -> Version:
        """
        Get the next alpha version from a valid version

        Examples:
            "1.2.3" will return "1.2.4a1"
            "1.2.3.dev1" will return "1.2.3a1"
        """
        if current_version.is_dev_release:
            if current_version.pre:
                if current_version.pre[0] == "alpha":
                    # finalize the alpha release the dev version leads up to
                    return cls.version_from_string(
                        f"{current_version.major}."
                        f"{current_version.minor}."
                        f"{current_version.patch}"
                        f"a{current_version.pre[1]}"
                    )
                # dev of a beta/rc: an alpha must target the next patch
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch + 1}a1"
                )
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}a1"
            )

        if current_version.is_alpha_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}a{current_version.pre[1] + 1}"  # type: ignore[index] # noqa: E501
            )

        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}a1"
        )

    @classmethod
    def next_beta_version(cls, current_version: Version) -> Version:
        """
        Get the next beta version from a valid version

        Examples:
            "1.2.3" will return "1.2.4b1"
            "1.2.3.dev1" will return "1.2.3b1"
        """
        if current_version.is_dev_release and current_version.pre:
            if current_version.pre[0] == "beta":
                # finalize the beta release the dev version leads up to
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch}"
                    f"b{current_version.pre[1]}"
                )
            if current_version.pre[0] == "rc":
                # dev of an rc: a beta must target the next patch
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch + 1}b1"
                )
        if current_version.is_dev_release or current_version.is_alpha_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}b1"
            )

        if current_version.is_beta_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}"
                f"b{current_version.pre[1] + 1}"  # type: ignore[index]
            )
        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}b1"
        )

    @classmethod
    def next_release_candidate_version(
        cls, current_version: Version
    ) -> Version:
        """
        Get the next release candidate version from a valid version

        Examples:
            "1.2.3" will return "1.2.4rc1"
            "1.2.3.dev1" will return "1.2.3rc1"
        """
        if current_version.is_dev_release:
            if current_version.pre and current_version.pre[0] == "rc":
                # finalize the rc release the dev version leads up to
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch}"
                    f"rc{current_version.pre[1]}"
                )
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}rc1"
            )

        if current_version.is_alpha_release or current_version.is_beta_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}rc1"
            )

        if current_version.is_release_candidate:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}rc{current_version.pre[1] + 1}"  # type: ignore[index] # noqa: E501
            )

        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}rc1"
        )


class PEP440VersioningScheme(VersioningScheme):
    """
    PEP 440 versioning scheme

    Binds the PEP 440 version implementation to its matching calculator.
    """

    name = "PEP440"
    version_cls = PEP440Version
    version_calculator_cls = PEP440VersionCalculator
pontos-25.3.2/pontos/version/schemes/_scheme.py000066400000000000000000000027661476255566300215760ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

from abc import ABC
from typing import Type

from .._calculator import VersionCalculator
from .._version import Version


class VersioningScheme(ABC):
    """
    Abstract base class for versioning schemes

    A concrete scheme binds a :class:`Version` implementation to the
    :class:`VersionCalculator` that derives follow-up versions for it.

    Example:
        Example on how to implement a new VersioningScheme

        .. code-block:: python

            from pontos.version.scheme import VersioningScheme

            class MyVersioningScheme(VersioningScheme):
                version_cls = MyVersion
                version_calculator_cls = MyVersionCalculator
    """

    version_cls: Type[Version]
    version_calculator_cls: Type[VersionCalculator]
    name: str

    @classmethod
    def parse_version(cls, version: str) -> Version:
        """
        Parse a version instance out of a version string.

        Raises:
            :py:class:`pontos.version.error.VersionError`: If the version
                string contains an invalid version

        Returns:
            A version instance
        """
        return cls.version_cls.from_string(version)

    @classmethod
    def from_version(cls, version: Version) -> Version:
        """
        Convert a version instance into this scheme's version class.
        """
        return cls.version_cls.from_version(version)

    @classmethod
    def calculator(cls) -> Type[VersionCalculator]:
        """
        Return the version calculator matching this versioning scheme.

        Returns:
            A version calculator class
        """
        return cls.version_calculator_cls
pontos-25.3.2/pontos/version/schemes/_semantic.py000066400000000000000000000406751476255566300221360ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import re
from typing import Any, Optional, Tuple

from semver import VersionInfo

from .._calculator import VersionCalculator
from .._errors import VersionError
from .._version import Version
from ._scheme import VersioningScheme

# Note: This regex currently supports any kind of
# word-number combination for pre releases, e.g. "alpha1" or "rc2-dev3".
# Fix: the named groups ("name", "version", "extra", "extra_version") are
# required by SemanticVersion._parse_pre_release/_parse_build; without them
# the pattern does not even compile.
_PRE_RELEASE_REGEXP = re.compile(
    r"^(?P<name>[a-zA-Z]+)(?P<version>0|[1-9][0-9]*)"
    r"(?:-(?P<extra>[a-zA-Z]+)(?P<extra_version>0|[1-9][0-9]*))?$"
)


class SemanticVersion(Version):
    """
    A Version implementation based on
    `Semantic Versioning`_
    """

    def __init__(
        self,
        version: str,
    ) -> None:
        super().__init__(version)
        # semver does the base parsing; pre-release and build metadata are
        # re-parsed below into pontos' (name, number) representation
        self._version_info = VersionInfo.parse(version)
        self._parse_build()
        self._parse_pre_release()

    def _parse_pre_release(self) -> None:
        """
        Split the semver pre-release part into a (name, number) pre-release
        tuple and/or a dev release number.

        Raises:
            VersionError: If the pre-release part doesn't match the expected
                word-number pattern or combines "dev" with a "-dev" suffix.
        """
        self._dev = None
        self._pre_release = None

        if self._version_info.prerelease:
            match = _PRE_RELEASE_REGEXP.match(self._version_info.prerelease)
            if not match:
                raise VersionError(
                    f"Invalid prerelease {self._version_info.prerelease} in "
                    f"{self._version_info}"
                )

            name = match.group("name")
            version = int(match.group("version"))

            if name == "dev":
                self._dev = version
            else:
                self._pre_release = (
                    name,
                    version,
                )

            extra = match.group("extra")
            if extra == "dev":
                # a pre-release followed by a dev part, e.g. "alpha1-dev2"
                if name == "dev":
                    # something like "dev1-dev2" is not allowed
                    raise VersionError(
                        f"Invalid prerelease {self._version_info.prerelease} "
                        f"in {self._version_info}"
                    )
                self._dev = int(match.group("extra_version"))

    def _parse_build(self) -> None:
        # build metadata like "build1" is kept as a ("build", 1) tuple and
        # exposed via the `local` property; non-matching metadata is dropped
        self._build = None
        if self._version_info.build:
            match = _PRE_RELEASE_REGEXP.match(self._version_info.build)
            if match:
                self._build = (
                    match.group("name"),
                    int(match.group("version")),
                )

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version."""
        return self._pre_release

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version."""
        return self._dev

    @property
    def local(self) -> Optional[Tuple[str, int]]:
        """The local version segment (semver build metadata) of the version."""
        return self._build

    @property
    def is_pre_release(self) -> bool:
        """
        Whether this version is a pre-release (alpha, beta, release candidate).
        """
        return self._pre_release is not None

    @property
    def is_dev_release(self) -> bool:
        """Whether this version is a development release."""
        return self._dev is not None

    @property
    def is_alpha_release(self) -> bool:
        """Whether this version is an alpha release."""
        return self.is_pre_release and self.pre[0] == "alpha"  # type: ignore[index] # noqa: E501

    @property
    def is_beta_release(self) -> bool:
        """Whether this version is a beta release."""
        return self.is_pre_release and self.pre[0] == "beta"  # type: ignore[index] # noqa: E501

    @property
    def is_release_candidate(self) -> bool:
        """Whether this version is a release candidate."""
        return self.is_pre_release and self.pre[0] == "rc"  # type: ignore[index] # noqa: E501

    @property
    def major(self) -> int:
        """The major part of the version."""
        return self._version_info.major

    @property
    def minor(self) -> int:
        """The minor part of the version."""
        return self._version_info.minor

    @property
    def patch(self) -> int:
        """The patch part of the version."""
        return self._version_info.patch

    def __eq__(self, other: Any) -> bool:
        """
        Whether this version equals ``other``, including build metadata.
        """
        if other is None:
            return False
        if isinstance(other, str):
            # allow to compare against "current" for now
            return False
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)

        return (
            self._version_info == other._version_info
            and self._version_info.build == other._version_info.build
        )

    def __ne__(self, other: Any) -> bool:
        # delegates to __eq__ so the two comparisons can never disagree
        return not self == other

    def __gt__(self, other: Any) -> bool:
        """
        Whether this version is newer than ``other``.
        """
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)

        # compare the (major, minor, patch) release part first
        if self._version_info.to_tuple()[:3] > other._version_info[:3]:  # type: ignore[operator] # noqa: E501
            return True
        if self._version_info.to_tuple()[:3] < other._version_info[:3]:  # type: ignore[operator] # noqa: E501
            return False

        # major, minor and patch are equal
        if self.is_dev_release:
            if not other.is_pre_release and not other.is_dev_release:
                return False
            if not self.is_pre_release and other.is_pre_release:
                return False
            if not self.is_pre_release:
                return self.dev > other.dev  # type: ignore[operator]

            if self.is_pre_release:
                if other.is_dev_release and self.pre == other.pre:
                    return self.dev > other.dev  # type: ignore[operator]
                return self.pre > other.pre  # type: ignore[operator]

        # not a dev release
        if self.is_pre_release:
            if not other.is_pre_release and not other.is_dev_release:
                return False

            if other.is_pre_release:
                if other.is_dev_release:
                    return self.pre >= other.pre  # type: ignore[operator]
                return self.pre > other.pre  # type: ignore[operator]

            # other is a dev release
            return True

        if other.is_dev_release or other.is_pre_release:
            return True

        # both are equal
        return False

    def __ge__(self, other: Any) -> bool:
        # NOTE: combination of __gt__ and __eq__
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return self > other or self == other

    def __lt__(self, other: Any) -> bool:
        # NOTE: negation of __ge__
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return (not self > other) and self != other

    def __le__(self, other: Any) -> bool:
        # NOTE: negation of __gt__ (kept with the redundant __eq__ check)
        if not isinstance(other, Version):
            raise ValueError(f"Can't compare {type(self)} with {type(other)}")
        if not isinstance(other, type(self)):
            other = self.from_version(other)
        return not self > other or self == other

    def __str__(self) -> str:
        """A string representation of the version"""
        return str(self._version_info)

    @classmethod
    def from_string(cls, version: str) -> "SemanticVersion":
        """
        Create a version from a version string

        Args:
            version: Version string to parse

        Raises:
            VersionError: If the version string is invalid.

        Returns:
            A new version instance
        """
        try:
            return cls(version)
        except ValueError as e:
            raise VersionError(e) from None

    @classmethod
    def from_version(cls, version: "Version") -> "SemanticVersion":
        """
        Convert a version (if necessary)

        This method can be used to convert version instances from different
        versioning schemes.

        Returns:
            A :class:`SemanticVersion` equivalent of ``version``
        """

        if isinstance(version, cls):
            return version

        try:
            # try to parse the original version string
            return cls.from_string(version.parsed_version)
        except VersionError:
            pass

        # rebuild a semver string from the version's parts
        version_local = (
            f"+{version.local[0]}{version.local[1]}" if version.local else ""
        )
        if version.is_dev_release:
            if not version.pre:
                new_version = cls.from_string(
                    f"{version.major}."
                    f"{version.minor}."
                    f"{version.patch}"
                    f"-dev{version.dev}"
                    f"{version_local}"
                )
            else:
                new_version = cls.from_string(
                    f"{version.major}."
                    f"{version.minor}."
                    f"{version.patch}"
                    f"-{version.pre[0]}{version.pre[1]}"
                    f"-dev{version.dev}"
                )
        elif version.is_alpha_release:
            new_version = cls.from_string(
                f"{version.major}."
                f"{version.minor}."
                f"{version.patch}"
                f"-alpha{version.pre[1]}"  # type: ignore[index]
                f"{version_local}"
            )
        elif version.is_beta_release:
            new_version = cls.from_string(
                f"{version.major}."
                f"{version.minor}."
                f"{version.patch}"
                f"-beta{version.pre[1]}"  # type: ignore[index]
                f"{version_local}"
            )
        elif version.is_pre_release:
            new_version = cls.from_string(
                f"{version.major}."
                f"{version.minor}."
                f"{version.patch}"
                f"-{version.pre[0]}{version.pre[1]}"  # type: ignore[index]
                f"{version_local}"
            )
        else:
            new_version = cls.from_string(str(version))

        # carry over the original parsed version string from the source
        new_version._parsed_version = version.parsed_version
        return new_version


# pylint: disable=protected-access
class SemanticVersionCalculator(VersionCalculator):
    """
    A Semantic Versioning version calculator

    Calculates follow-up development, alpha, beta and release candidate
    versions for a given :class:`SemanticVersion`.
    """

    version_cls = SemanticVersion

    @classmethod
    def next_dev_version(cls, current_version: Version) -> Version:
        """
        Get the next development version from a valid version

        Examples:
            "1.2.3" will return "1.2.4-dev1"
            "1.2.3-dev1" will return "1.2.3-dev2"
        """
        if current_version.is_dev_release:
            # keep an existing pre-release part and just bump the dev counter
            if current_version.pre:
                return cls.version_from_string(
                    f"{current_version.major}."  # type: ignore[operator]
                    f"{current_version.minor}."
                    f"{current_version.patch}"
                    f"-{current_version.pre[0]}{current_version.pre[1]}"
                    f"-dev{current_version.dev + 1}"  # type: ignore[operator]
                )
            return cls.version_from_string(
                f"{current_version.major}."  # type: ignore[operator]
                f"{current_version.minor}."
                f"{current_version.patch}"
                f"-dev{current_version.dev + 1}"  # type: ignore[operator]
            )

        if current_version.is_pre_release:
            # bump the pre-release counter and restart the dev counter
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}-"
                f"{current_version.pre[0]}{current_version.pre[1] + 1}-dev1"  # type: ignore[index] # noqa: E501
            )

        # final release: development starts for the next patch release
        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}"
            "-dev1"
        )

    @classmethod
    def next_alpha_version(cls, current_version: Version) -> Version:
        """
        Get the next alpha version from a valid version

        Examples:
            "1.2.3" will return "1.2.4-alpha1"
            "1.2.3-dev1" will return "1.2.3-alpha1"
        """
        if current_version.is_dev_release:
            if current_version.pre:
                if current_version.pre[0] == "alpha":
                    # finalize the alpha release the dev version leads up to
                    return cls.version_from_string(
                        f"{current_version.major}."
                        f"{current_version.minor}."
                        f"{current_version.patch}"
                        f"-alpha{current_version.pre[1]}"
                    )
                # dev of a beta/rc: an alpha must target the next patch
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch + 1}-alpha1"
                )
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}-alpha1"
            )
        if current_version.is_alpha_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}"
                f"-alpha{current_version.pre[1] + 1}"  # type: ignore[index]
            )
        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}"
            "-alpha1"
        )

    @classmethod
    def next_beta_version(cls, current_version: Version) -> Version:
        """
        Get the next beta version from a valid version

        Examples:
            "1.2.3" will return "1.2.4-beta1"
            "1.2.3-dev1" will return "1.2.3-beta1"
        """
        if current_version.is_dev_release and current_version.pre:
            if current_version.pre[0] == "beta":
                # finalize the beta release the dev version leads up to
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch}"
                    f"-beta{current_version.pre[1]}"
                )
            if current_version.pre[0] == "rc":
                # dev of an rc: a beta must target the next patch
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch + 1}-beta1"
                )
        if current_version.is_dev_release or current_version.is_alpha_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}"
                "-beta1"
            )
        if current_version.is_beta_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}"
                f"-beta{current_version.pre[1] + 1}"  # type: ignore[index]
            )
        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}"
            "-beta1"
        )

    @classmethod
    def next_release_candidate_version(
        cls, current_version: Version
    ) -> Version:
        """
        Get the next release candidate version from a valid version

        Examples:
            "1.2.3" will return "1.2.4-rc1"
            "1.2.3-dev1" will return "1.2.3-rc1"
        """
        if current_version.is_dev_release:
            if current_version.pre and current_version.pre[0] == "rc":
                # finalize the rc release the dev version leads up to
                return cls.version_from_string(
                    f"{current_version.major}."
                    f"{current_version.minor}."
                    f"{current_version.patch}"
                    f"-rc{current_version.pre[1]}"
                )
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}-rc1"
            )
        if current_version.is_alpha_release or current_version.is_beta_release:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}-rc1"
            )
        if current_version.is_release_candidate:
            return cls.version_from_string(
                f"{current_version.major}."
                f"{current_version.minor}."
                f"{current_version.patch}"
                f"-rc{current_version.pre[1] + 1}"  # type: ignore[index]
            )
        return cls.version_from_string(
            f"{current_version.major}."
            f"{current_version.minor}."
            f"{current_version.patch + 1}"
            "-rc1"
        )


class SemanticVersioningScheme(VersioningScheme):
    """
    Semantic Versioning scheme

    Binds the semver-based version implementation to its matching calculator.
    """

    name = "SemVer"
    version_cls = SemanticVersion
    version_calculator_cls = SemanticVersionCalculator
pontos-25.3.2/pyproject.toml000066400000000000000000000066621476255566300160760ustar00rootroot00000000000000[tool.poetry]
name = "pontos"
version = "25.3.2"
description = "Common utilities and tools maintained by Greenbone Networks"
authors = ["Greenbone AG <info@greenbone.net>"]
license = "GPL-3.0-or-later"
readme = "README.md"
homepage = "https://github.com/greenbone/pontos/"
repository = "https://github.com/greenbone/pontos/"
documentation = "https://greenbone.github.io/pontos/"

classifiers = [
  # Full list: https://pypi.org/pypi?%3Aaction=list_classifiers
  "Development Status :: 4 - Beta",
  "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", # pylint: disable=line-too-long
  "Environment :: Console",
  "Intended Audience :: Developers",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
  "Operating System :: OS Independent",
  "Topic :: Software Development :: Libraries :: Python Modules",
]

packages = [
  { include = "pontos" },
  { include = "tests", format = "sdist" },
  { include = "poetry.lock", format = "sdist" },
]
include = ["pontos/updateheader/templates/", "pontos/github/pr_template.md"]

[tool.poetry.dependencies]
python = "^3.9"
colorful = ">=0.5.4"
tomlkit = ">=0.5.11"
packaging = ">=20.3"
httpx = { extras = ["http2"], version = ">=0.23" }
rich = ">=12.4.4"
python-dateutil = ">=2.8.2"
semver = ">=2.13"
lxml = ">=4.9.0"
shtab = ">=1.7.0"

[tool.poetry.group.dev.dependencies]
autohooks = ">=22.7.0"
autohooks-plugin-black = ">=22.7.0"
autohooks-plugin-ruff = ">=23.6.1"
autohooks-plugin-mypy = ">=23.3.0"
rope = ">=1.9.0"
coverage = ">=7.2"
myst-parser = ">=0.19.1"
Sphinx = ">=7.0.1"
furo = ">=2023.5.20"
sphinx-autobuild = ">=2021.3.14"

[tool.black]
line-length = 80
target-version = ['py39', 'py310', 'py311', 'py312']
exclude = '''
/(
    \.git
  | \.venv
  | \.github
  | \.vscode
  | _build
  | build
  | dist
  | docs
)/
'''

[tool.autohooks]
pre-commit = [
  'autohooks.plugins.black',
  'autohooks.plugins.ruff',
  'autohooks.plugins.mypy',
]
mode = "poetry"

[tool.isort]
profile = "black"
line_length = 80

[tool.ruff]
line-length = 80
target-version = "py39"

[tool.ruff.lint]
extend-select = ["I", "PLE", "PLW"]

[tool.mypy]
files = "pontos"
ignore_missing_imports = true
explicit_package_bases = true
allow_redefinition = true
exclude = 'pontos/updateheader/templates/.*/*\.py'

[[tool.mypy.overrides]]
module = "dateutil"
ignore_missing_imports = true

[tool.pontos.version]
version-module-file = "pontos/version/__version__.py"

[tool.coverage.run]
branch = true
omit = ["tests/*", "pontos/github/scripts/*", "*/__init__.py"]
source = ["pontos"]

[tool.poetry.scripts]
pontos = 'pontos:main'
pontos-version = 'pontos.version:main'
pontos-release = 'pontos.release:main'
pontos-update-header = 'pontos.updateheader:main'
pontos-changelog = 'pontos.changelog:main'
pontos-github = 'pontos.github:main'
pontos-github-actions = 'pontos.github.actions:main'
pontos-github-script = 'pontos.github.script:main'
pontos-nvd-cve = 'pontos.nvd.cve:cve_main'
pontos-nvd-cves = 'pontos.nvd.cve:cves_main'
pontos-nvd-cve-changes = 'pontos.nvd.cve_changes:main'
pontos-nvd-cpe = 'pontos.nvd.cpe:cpe_main'
pontos-nvd-cpes = 'pontos.nvd.cpe:cpes_main'
pontos-nvd-cpe-match = 'pontos.nvd.cpe_matches:cpe_match_main'
pontos-nvd-cpe-matches = 'pontos.nvd.cpe_matches:cpe_matches_main'
pontos-nvd-sources = 'pontos.nvd.source:main'

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
pontos-25.3.2/tests/000077500000000000000000000000001476255566300143125ustar00rootroot00000000000000pontos-25.3.2/tests/__init__.py000066400000000000000000000015761476255566300164340ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=no-name-in-module,no-member,unnecessary-dunder-call

import builtins
import sys
from unittest import IsolatedAsyncioTestCase
from unittest.mock import AsyncMock

from pontos.testing import AsyncIteratorMock

if sys.version_info >= (3, 10):
    # Python 3.10+ ships aiter() and anext() as builtins; wrap them so the
    # names can always be imported from this package.
    def aiter(obj):  # pylint: disable=redefined-builtin
        return builtins.aiter(obj)

    def anext(obj):  # pylint: disable=redefined-builtin
        return builtins.anext(obj)

else:
    # Fallback implementations for Python 3.9, where aiter and anext do not
    # exist as builtins yet.
    def aiter(obj):  # pylint: disable=redefined-builtin
        return obj.__aiter__()

    def anext(obj):  # pylint: disable=redefined-builtin
        return obj.__anext__()


# Convenience re-exports for the test modules in this package.
__all__ = (
    "IsolatedAsyncioTestCase",
    "AsyncMock",
    "AsyncIteratorMock",
    "aiter",
    "anext",
)
pontos-25.3.2/tests/changelog/000077500000000000000000000000001476255566300162415ustar00rootroot00000000000000pontos-25.3.2/tests/changelog/__init__.py000066400000000000000000000006271476255566300203570ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import os
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator


@contextmanager
def use_cwd(path: Path) -> Iterator[None]:
    """
    Context Manager to change the current working directory temporarily

    The previous working directory is restored even if the managed block
    raises an exception.

    Args:
        path: Directory to change into for the duration of the block.
    """
    current_cwd = Path.cwd()

    os.chdir(str(path))

    try:
        yield
    finally:
        # always restore the original working directory
        os.chdir(str(current_cwd))
pontos-25.3.2/tests/changelog/changelog.toml000066400000000000000000000005671476255566300210750ustar00rootroot00000000000000commit_types = [
    { message = "^add", group = "Added"},
    { message = "^remove", group = "Removed"},
    { message = "^change", group = "Changed"},
    { message = "^fix", group = "Bug Fixes"},
    { message = "^doc", group = "Documentation"},
    { message = "^refactor", group = "Refactor"},
    { message = "^test", group = "Testing"},
]

changelog_dir = "changelog"
pontos-25.3.2/tests/changelog/test_conventional_commits.py000066400000000000000000000436531476255566300241170ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from unittest.mock import MagicMock, patch

from pontos.changelog.conventional_commits import (
    ChangelogBuilder,
    ChangelogBuilderError,
    ConventionalCommits,
)
from pontos.testing import temp_directory


@dataclass
class StdOutput:
    """Minimal stand-in for a process result exposing only captured stdout."""

    # raw stdout bytes as a completed subprocess would provide them
    stdout: bytes


class ChangelogBuilderTestCase(unittest.TestCase):
    """Tests for ChangelogBuilder: rendering conventional commits from a
    mocked git log into grouped Markdown changelog sections."""

    # show full diffs for the long multi-line changelog comparisons
    maxDiff = None

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_with_config(self, git_mock: MagicMock):
        """Commits are grouped into the sections defined in changelog.toml;
        non-conventional commit lines are ignored."""
        today = datetime.today().strftime("%Y-%m-%d")

        own_path = Path(__file__).absolute().parent
        config_toml = own_path / "changelog.toml"

        git_mock.return_value.list_tags.return_value = ["v0.0.1"]
        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]

        expected_output = f"""## [0.0.2] - {today}

## Added
* foo bar [1234567](https://github.com/foo/bar/commit/1234567)
* bar baz [8abcdef](https://github.com/foo/bar/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/foo/bar/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/foo/bar/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

## Documentation
* bar baz documenting [d0c4d0c](https://github.com/foo/bar/commit/d0c4d0c)

## Refactor
* bar baz ref [dead901](https://github.com/foo/bar/commit/dead901)

## Testing
* bar baz testing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

[0.0.2]: https://github.com/foo/bar/compare/v0.0.1...v0.0.2"""

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            config=config_toml,
        )
        changelog = changelog_builder.create_changelog(
            last_version="0.0.1", next_version="0.0.2"
        )

        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(
            "v0.0.1..HEAD", oneline=True
        )

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_no_commits(self, git_mock: MagicMock):
        """An empty git log yields only a header and a compare link."""
        today = datetime.today().strftime("%Y-%m-%d")

        own_path = Path(__file__).absolute().parent
        config_toml = own_path / "changelog.toml"
        expected_output = f"""## [0.0.2] - {today}

[0.0.2]: https://github.com/foo/bar/compare/v0.0.1...v0.0.2"""

        git_mock.return_value.log.return_value = []

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            config=config_toml,
        )
        changelog = changelog_builder.create_changelog(
            last_version="0.0.1", next_version="0.0.2"
        )

        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(
            "v0.0.1..HEAD", oneline=True
        )

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_no_last_version(self, git_mock: MagicMock):
        """Without a last version the first commit from rev_list anchors
        the compare link and the whole log is used."""
        today = datetime.today().strftime("%Y-%m-%d")

        own_path = Path(__file__).absolute().parent
        config_toml = own_path / "changelog.toml"

        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]
        git_mock.return_value.rev_list.return_value = ["123"]

        expected_output = f"""## [0.0.2] - {today}

## Added
* foo bar [1234567](https://github.com/foo/bar/commit/1234567)
* bar baz [8abcdef](https://github.com/foo/bar/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/foo/bar/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/foo/bar/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

## Documentation
* bar baz documenting [d0c4d0c](https://github.com/foo/bar/commit/d0c4d0c)

## Refactor
* bar baz ref [dead901](https://github.com/foo/bar/commit/dead901)

## Testing
* bar baz testing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

[0.0.2]: https://github.com/foo/bar/compare/123...v0.0.2"""

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            config=config_toml,
        )
        changelog = changelog_builder.create_changelog(next_version="0.0.2")

        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(oneline=True)

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_no_next_version(self, git_mock: MagicMock):
        """Without a next version the changelog is titled 'Unreleased' and
        the compare link ends at HEAD."""
        own_path = Path(__file__).absolute().parent
        config_toml = own_path / "changelog.toml"

        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]
        git_mock.return_value.rev_list.return_value = ["123"]

        expected_output = """## [Unreleased]

## Added
* foo bar [1234567](https://github.com/foo/bar/commit/1234567)
* bar baz [8abcdef](https://github.com/foo/bar/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/foo/bar/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/foo/bar/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

## Documentation
* bar baz documenting [d0c4d0c](https://github.com/foo/bar/commit/d0c4d0c)

## Refactor
* bar baz ref [dead901](https://github.com/foo/bar/commit/dead901)

## Testing
* bar baz testing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

[Unreleased]: https://github.com/foo/bar/compare/v0.0.1...HEAD"""

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            config=config_toml,
        )
        changelog = changelog_builder.create_changelog(last_version="0.0.1")

        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(
            "v0.0.1..HEAD", oneline=True
        )

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_no_next_and_last_version(
        self, git_mock: MagicMock
    ):
        """With neither version given, 'Unreleased' spans from the first
        commit (rev_list) to HEAD."""
        own_path = Path(__file__).absolute().parent
        config_toml = own_path / "changelog.toml"

        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]
        git_mock.return_value.rev_list.return_value = ["123"]

        expected_output = """## [Unreleased]

## Added
* foo bar [1234567](https://github.com/foo/bar/commit/1234567)
* bar baz [8abcdef](https://github.com/foo/bar/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/foo/bar/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/foo/bar/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

## Documentation
* bar baz documenting [d0c4d0c](https://github.com/foo/bar/commit/d0c4d0c)

## Refactor
* bar baz ref [dead901](https://github.com/foo/bar/commit/dead901)

## Testing
* bar baz testing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

[Unreleased]: https://github.com/foo/bar/compare/123...HEAD"""

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            config=config_toml,
        )
        changelog = changelog_builder.create_changelog()

        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(oneline=True)

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_no_conventional_commits(
        self, git_mock: MagicMock
    ):
        """Commits without a conventional type prefix produce no sections."""
        today = datetime.today().strftime("%Y-%m-%d")

        own_path = Path(__file__).absolute().parent
        config_toml = own_path / "changelog.toml"
        expected_output = f"""## [0.0.2] - {today}

[0.0.2]: https://github.com/foo/bar/compare/v0.0.1...v0.0.2"""

        git_mock.return_value.list_tags.return_value = ["v0.0.1"]
        git_mock.return_value.log.return_value = [
            "1234567 foo bar",
            "8abcdef bar baz",
        ]
        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            config=config_toml,
        )
        changelog = changelog_builder.create_changelog(
            last_version="0.0.1", next_version="0.0.2"
        )

        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(
            "v0.0.1..HEAD", oneline=True
        )

    def test_changelog_builder_config_file_not_exists(self):
        """A missing config file raises ChangelogBuilderError."""
        with (
            temp_directory() as temp_dir,
            self.assertRaisesRegex(
                ChangelogBuilderError,
                r"Changelog Config file '.*\.toml' does not exist\.",
            ),
        ):
            ChangelogBuilder(
                repository="foo/bar",
                config=temp_dir / "changelog.toml",
            )

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_with_default_changelog_config(
        self, git_mock: MagicMock
    ):
        """Without a config file the built-in commit types are used,
        including the 'Dependencies' group for Deps commits."""
        today = datetime.today().strftime("%Y-%m-%d")

        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
            "a1c5a0b Deps: Update foo from 1.2.3 to 3.2.1",
        ]

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
        )
        expected_output = f"""## [0.0.2] - {today}

## Added
* foo bar [1234567](https://github.com/foo/bar/commit/1234567)
* bar baz [8abcdef](https://github.com/foo/bar/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/foo/bar/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/foo/bar/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

## Dependencies
* Update foo from 1.2.3 to 3.2.1 [a1c5a0b](https://github.com/foo/bar/commit/a1c5a0b)

[0.0.2]: https://github.com/foo/bar/compare/v0.0.1...v0.0.2"""

        changelog = changelog_builder.create_changelog(
            last_version="0.0.1", next_version="0.0.2"
        )
        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(
            "v0.0.1..HEAD", oneline=True
        )

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_changelog_builder_with_empty_git_tag_prefix(
        self, git_mock: MagicMock
    ):
        """An empty git tag prefix drops the leading 'v' from refs and
        compare links."""
        today = datetime.today().strftime("%Y-%m-%d")

        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            git_tag_prefix="",
        )
        expected_output = f"""## [0.0.2] - {today}

## Added
* foo bar [1234567](https://github.com/foo/bar/commit/1234567)
* bar baz [8abcdef](https://github.com/foo/bar/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/foo/bar/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/foo/bar/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

[0.0.2]: https://github.com/foo/bar/compare/0.0.1...0.0.2"""

        changelog = changelog_builder.create_changelog(
            last_version="0.0.1", next_version="0.0.2"
        )
        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(
            "0.0.1..HEAD", oneline=True
        )

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_write_changelog_to_file(self, git_mock: MagicMock):
        """create_changelog_file writes the rendered changelog to disk."""
        today = datetime.today().strftime("%Y-%m-%d")

        own_path = Path(__file__).absolute().parent
        config_toml = own_path / "changelog.toml"

        git_mock.return_value.list_tags.return_value = ["v0.0.1"]
        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]

        expected_output = f"""## [0.0.2] - {today}

## Added
* foo bar [1234567](https://github.com/foo/bar/commit/1234567)
* bar baz [8abcdef](https://github.com/foo/bar/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/foo/bar/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/foo/bar/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

## Documentation
* bar baz documenting [d0c4d0c](https://github.com/foo/bar/commit/d0c4d0c)

## Refactor
* bar baz ref [dead901](https://github.com/foo/bar/commit/dead901)

## Testing
* bar baz testing [fedcba8](https://github.com/foo/bar/commit/fedcba8)

[0.0.2]: https://github.com/foo/bar/compare/v0.0.1...v0.0.2"""

        changelog_builder = ChangelogBuilder(
            repository="foo/bar",
            config=config_toml,
        )

        with temp_directory() as temp_dir:
            changelog_file = temp_dir / "changelog.md"
            changelog_builder.create_changelog_file(
                changelog_file, last_version="0.0.1", next_version="0.0.2"
            )
            changelog = changelog_file.read_text(encoding="utf8")

        self.assertEqual(changelog, expected_output)

        git_mock.return_value.log.assert_called_once_with(
            "v0.0.1..HEAD", oneline=True
        )


class ConventionalCommitsTestCase(unittest.TestCase):
    """Tests for ConventionalCommits: commit collection and the default
    commit type configuration."""

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    def test_get_commits(self, git_mock: MagicMock):
        """Commits are grouped by conventional type; entries unpack to
        (commit_id, message) tuples."""
        git_mock.return_value.list_tags.return_value = ["v0.0.1"]
        git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]

        conventional_commits = ConventionalCommits()
        commits = conventional_commits.get_commits(from_ref="0.0.1")

        self.assertEqual(len(commits), 4)  # four commit types
        self.assertEqual(len(commits["Added"]), 2)
        self.assertEqual(len(commits["Changed"]), 1)
        self.assertEqual(len(commits["Removed"]), 1)
        self.assertEqual(len(commits["Bug Fixes"]), 1)

        removed = commits["Removed"][0]
        self.assertEqual(removed.commit_id, "42a42a4")
        self.assertEqual(removed.message, "foo bar again")

        # entries also support tuple unpacking
        commit_id, message = removed
        self.assertEqual(commit_id, "42a42a4")
        self.assertEqual(message, "foo bar again")

    def test_default_config(self):
        """The default config defines five commit type groups in order."""
        conventional_commits = ConventionalCommits()

        categories = conventional_commits.commit_types()

        self.assertEqual(len(categories), 5)

        add = categories[0]
        self.assertEqual(add["message"], "^add")
        self.assertEqual(add["group"], "Added")

        remove = categories[1]
        self.assertEqual(remove["message"], "^remove")
        self.assertEqual(remove["group"], "Removed")

        change = categories[2]
        self.assertEqual(change["message"], "^change")
        self.assertEqual(change["group"], "Changed")

        fix = categories[3]
        self.assertEqual(fix["message"], "^fix")
        self.assertEqual(fix["group"], "Bug Fixes")

        deps = categories[4]
        self.assertEqual(deps["message"], "^deps")
        self.assertEqual(deps["group"], "Dependencies")
pontos-25.3.2/tests/changelog/test_parser.py000066400000000000000000000022641476255566300211520ustar00rootroot00000000000000# Copyright (C) 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
import unittest
from pathlib import Path

from pontos.changelog.main import parse_args
from pontos.version.schemes._pep440 import PEP440Version


class ParseArgsTestCase(unittest.TestCase):
    """Tests for the pontos-changelog command line argument parser."""

    def test_parse_args(self):
        """All CLI options are parsed and converted to their target types
        (Path for files, PEP440Version for versions)."""
        parsed_args = parse_args(
            [
                "-q",
                "--repository",
                "urghs/bla",
                "--config",
                "foo.toml",
                "--current-version",
                "1.2.3",
                "--next-version",
                "2.3.4",
                "--git-tag-prefix",
                "a",
                "--output",
                "changelog.md",
            ]
        )

        self.assertTrue(parsed_args.quiet)
        self.assertEqual(parsed_args.repository, "urghs/bla")
        self.assertEqual(parsed_args.config, Path("foo.toml"))
        self.assertEqual(parsed_args.current_version, PEP440Version("1.2.3"))
        self.assertEqual(parsed_args.next_version, PEP440Version("2.3.4"))
        self.assertEqual(parsed_args.git_tag_prefix, "a")
        self.assertEqual(parsed_args.output, Path("changelog.md"))
pontos-25.3.2/tests/cpe/000077500000000000000000000000001476255566300150615ustar00rootroot00000000000000pontos-25.3.2/tests/cpe/__init__.py000066400000000000000000000001321476255566300171660ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
pontos-25.3.2/tests/cpe/test_cpe.py000066400000000000000000000703651476255566300172540ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022 - 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa: E501

import unittest

from pontos.cpe import ANY, CPE, NA, CPEParsingError, Part
from pontos.cpe._cpe import (
    bind_value_for_formatted_string,
    convert_double_backslash,
    split_cpe,
    unbind_value_from_formatted_string,
    unquote_attribute_value,
)


class SplitCpeTestCase(unittest.TestCase):
    """Tests for splitting CPE strings into their component parts."""

    def test_split_uri_cpe(self):
        """A URI bound CPE splits into its colon separated components."""
        parts = split_cpe("cpe:/o:microsoft:windows_xp:::pro")

        self.assertEqual(len(parts), 7)
        self.assertEqual(parts[0], "cpe")
        self.assertEqual(parts[1], "/o")
        self.assertEqual(parts[2], "microsoft")
        self.assertEqual(parts[3], "windows_xp")
        self.assertEqual(parts[4], "")
        self.assertEqual(parts[5], "")
        self.assertEqual(parts[6], "pro")

    def test_split_formatted_cpe(self):
        """A formatted string CPE splits into 13 components; a quoted
        colon (backslash-escaped) is kept within its component."""
        parts = split_cpe(
            "cpe:2.3:a:microsoft:internet_explorer:8.0.6001:beta:*:*:*:*:*:*"
        )

        self.assertEqual(len(parts), 13)
        self.assertEqual(parts[0], "cpe")
        self.assertEqual(parts[1], "2.3")
        self.assertEqual(parts[2], "a")
        self.assertEqual(parts[3], "microsoft")
        self.assertEqual(parts[4], "internet_explorer")
        self.assertEqual(parts[5], "8.0.6001")
        self.assertEqual(parts[6], "beta")
        self.assertEqual(parts[7], "*")
        self.assertEqual(parts[8], "*")
        self.assertEqual(parts[9], "*")
        self.assertEqual(parts[10], "*")
        self.assertEqual(parts[11], "*")
        self.assertEqual(parts[12], "*")

        # "\\:" (escaped backslash) replaces the former invalid "\:"
        # escape sequence; the runtime string value is unchanged
        parts = split_cpe("cpe:2.3:a:foo:bar\\:mumble:1.0:*:*:*:*:*:*:*")

        self.assertEqual(len(parts), 13)
        self.assertEqual(parts[0], "cpe")
        self.assertEqual(parts[1], "2.3")
        self.assertEqual(parts[2], "a")
        self.assertEqual(parts[3], "foo")
        self.assertEqual(parts[4], "bar\\:mumble")
        self.assertEqual(parts[5], "1.0")
        self.assertEqual(parts[6], "*")
        self.assertEqual(parts[7], "*")
        self.assertEqual(parts[8], "*")
        self.assertEqual(parts[9], "*")
        self.assertEqual(parts[10], "*")
        self.assertEqual(parts[11], "*")
        self.assertEqual(parts[12], "*")


class ConvertDoubleBackslashTestCase(unittest.TestCase):
    """Tests for collapsing doubled backslashes in CPE attribute values."""

    def test_remove_backslash(self):
        """Only doubled backslashes are collapsed; other text is kept."""
        self.assertEqual(convert_double_backslash("foo-bar"), "foo-bar")
        self.assertEqual(convert_double_backslash("foo\\bar"), "foo\\bar")
        self.assertEqual(convert_double_backslash("foo\\\\bar"), "foo\\bar")


class UnbindValueFromFormattedStringTestCase(unittest.TestCase):
    """Tests for unbinding attribute values from formatted string CPEs.

    The regex patterns passed to assertRaisesRegex are now raw strings;
    the former plain strings contained invalid "\\*" and "\\?" escape
    sequences (a SyntaxWarning since Python 3.12) with identical runtime
    values.
    """

    def test_unchanged(self):
        """None, ANY, NA and already quoted values pass through as-is."""
        self.assertIsNone(unbind_value_from_formatted_string(None))
        self.assertEqual(unbind_value_from_formatted_string(ANY), ANY)
        self.assertEqual(unbind_value_from_formatted_string(NA), NA)

        self.assertEqual(
            unbind_value_from_formatted_string("foo_bar"), "foo_bar"
        )
        self.assertEqual(
            unbind_value_from_formatted_string("foo\\:bar"), "foo\\:bar"
        )
        self.assertEqual(
            unbind_value_from_formatted_string("1\\.2\\.3"), "1\\.2\\.3"
        )

    def test_quoting(self):
        """Unquoted punctuation gets backslash quoted."""
        self.assertEqual(
            unbind_value_from_formatted_string("foo-bar"), "foo\\-bar"
        )
        self.assertEqual(  # not sure if this can happen and if it's valid
            unbind_value_from_formatted_string("foo:bar"), "foo\\:bar"
        )
        self.assertEqual(
            unbind_value_from_formatted_string("1.2.3"), "1\\.2\\.3"
        )

    def test_asterisk(self):
        """An unquoted asterisk is only allowed at the value edges."""
        self.assertEqual(unbind_value_from_formatted_string("*foo"), "*foo")
        self.assertEqual(unbind_value_from_formatted_string("foo*"), "foo*")
        self.assertEqual(unbind_value_from_formatted_string("foo\\*"), "foo\\*")

        with self.assertRaisesRegex(
            CPEParsingError,
            "An unquoted asterisk must appear at the beginning or end of "
            r"'foo\*bar'",
        ):
            unbind_value_from_formatted_string("foo*bar")

        with self.assertRaisesRegex(
            CPEParsingError,
            "An unquoted asterisk must appear at the beginning or end of "
            r"'\*\*foo'",
        ):
            unbind_value_from_formatted_string("**foo")

    def test_question_mark(self):
        """Unquoted question marks may only lead or trail the value."""
        self.assertEqual(unbind_value_from_formatted_string("?foo"), "?foo")
        self.assertEqual(unbind_value_from_formatted_string("??foo"), "??foo")
        self.assertEqual(unbind_value_from_formatted_string("foo?"), "foo?")
        self.assertEqual(unbind_value_from_formatted_string("foo??"), "foo??")
        self.assertEqual(unbind_value_from_formatted_string("foo\\?"), "foo\\?")

        with self.assertRaisesRegex(
            CPEParsingError,
            "An unquoted question mark must appear at the beginning or end, "
            r"or in a leading or trailing sequence 'foo\?bar'",
        ):
            unbind_value_from_formatted_string("foo?bar")


class BindValueForFormattedStringTestCase(unittest.TestCase):
    """Tests for binding attribute values into formatted string CPEs."""

    def test_any(self):
        """None, empty string and ANY all bind to the ANY wildcard."""
        self.assertEqual(bind_value_for_formatted_string(None), ANY)
        self.assertEqual(bind_value_for_formatted_string(""), ANY)
        self.assertEqual(bind_value_for_formatted_string(ANY), ANY)

    def test_na(self):
        """NA binds to itself."""
        self.assertEqual(bind_value_for_formatted_string(NA), NA)

    def test_remove_quoting(self):
        """Quoted '.', '_' and '-' are unquoted when binding."""
        self.assertEqual(bind_value_for_formatted_string("1\\.2\\.3"), "1.2.3")
        # _ doesn't get quoted during unbinding therefore unquoting it here
        # doesn't really make sense but it's in the standard!
        self.assertEqual(
            bind_value_for_formatted_string("foo\\_bar"), "foo_bar"
        )
        self.assertEqual(
            bind_value_for_formatted_string("foo\\-bar"), "foo-bar"
        )

    def test_unchanged(self):
        """Quoted colons/asterisks and wildcard characters are kept."""
        self.assertEqual(
            bind_value_for_formatted_string("foo\\:bar"), "foo\\:bar"
        )
        self.assertEqual(bind_value_for_formatted_string("?foo"), "?foo")
        self.assertEqual(bind_value_for_formatted_string("foo*"), "foo*")
        self.assertEqual(bind_value_for_formatted_string("foo\\*"), "foo\\*")


class UnquoteAttributeValueTestCase(unittest.TestCase):
    """Tests for unquoting CPE attribute values."""

    def test_unchanged(self):
        """Values without quoting (including None/ANY) pass through."""
        self.assertIsNone(unquote_attribute_value(None))
        self.assertEqual(unquote_attribute_value(""), "")
        self.assertEqual(unquote_attribute_value(ANY), ANY)
        self.assertEqual(unquote_attribute_value("?"), "?")
        self.assertEqual(unquote_attribute_value("foo-bar"), "foo-bar")
        self.assertEqual(unquote_attribute_value("foo_bar"), "foo_bar")
        self.assertEqual(unquote_attribute_value("1.2.3"), "1.2.3")

    def test_special(self):
        """Quoted wildcard characters ('?', '*') stay quoted."""
        self.assertEqual(unquote_attribute_value("foo\\?bar"), "foo\\?bar")
        self.assertEqual(unquote_attribute_value("foo\\*bar"), "foo\\*bar")

    def test_unquote(self):
        """Quoted backslashes, colons and dots are unquoted."""
        self.assertEqual(unquote_attribute_value("foo\\\\bar"), "foo\\bar")
        self.assertEqual(unquote_attribute_value("foo\\:bar"), "foo:bar")
        self.assertEqual(unquote_attribute_value("1\\.2\\.3"), "1.2.3")


class CPETestCase(unittest.TestCase):
    def test_uri_binding(self):
        cpe_string = "cpe:/o:microsoft:windows_xp:::pro"
        cpe = CPE.from_string(cpe_string)

        self.assertEqual(str(cpe), "cpe:/o:microsoft:windows_xp:::pro")
        self.assertEqual(
            cpe.as_uri_binding(), "cpe:/o:microsoft:windows_xp:::pro"
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            "cpe:2.3:o:microsoft:windows_xp:*:*:pro:*:*:*:*:*",
        )
        self.assertTrue(cpe.is_uri_binding())
        self.assertFalse(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.OPERATING_SYSTEM)
        self.assertEqual(cpe.vendor, "microsoft")
        self.assertEqual(cpe.product, "windows_xp")
        self.assertEqual(cpe.edition, "pro")
        self.assertEqual(cpe.version, ANY)
        self.assertEqual(cpe.update, ANY)
        self.assertIsNone(cpe.language)
        self.assertIsNone(cpe.sw_edition)
        self.assertIsNone(cpe.target_sw)
        self.assertIsNone(cpe.target_hw)
        self.assertIsNone(cpe.other)

        cpe = CPE.from_string(
            "cpe:/a:foo%5cbar:big%24money_manager_2010:::~~special~ipod_touch~80gb~"
        )
        self.assertEqual(
            str(cpe),
            "cpe:/a:foo%5cbar:big%24money_manager_2010:::~~special~ipod_touch~80gb~",
        )
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:foo%5cbar:big%24money_manager_2010:::~~special~ipod_touch~80gb~",
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            "cpe:2.3:a:foo\\\\bar:big\$money_manager_2010:*:*:*:*:special:ipod_touch:80gb:*",
        )
        self.assertTrue(cpe.is_uri_binding())
        self.assertFalse(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "foo\\bar")
        self.assertEqual(cpe.product, "big$money_manager_2010")
        self.assertEqual(cpe.version, ANY)
        self.assertEqual(cpe.update, ANY)
        self.assertIsNone(cpe.language)
        self.assertIsNone(cpe.edition)
        self.assertEqual(cpe.sw_edition, "special")
        self.assertEqual(cpe.target_sw, "ipod_touch")
        self.assertEqual(cpe.target_hw, "80gb")
        self.assertIsNone(cpe.other)

    def test_formatted_string_binding(self):
        cpe_string = (
            "cpe:2.3:a:qrokes:qr_twitter_widget:*:*:*:*:*:wordpress:*:*"
        )
        cpe = CPE.from_string(cpe_string)

        self.assertEqual(
            str(cpe),
            "cpe:2.3:a:qrokes:qr_twitter_widget:*:*:*:*:*:wordpress:*:*",
        )
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:qrokes:qr_twitter_widget:::~~~wordpress~~",
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            "cpe:2.3:a:qrokes:qr_twitter_widget:*:*:*:*:*:wordpress:*:*",
        )
        self.assertFalse(cpe.is_uri_binding())
        self.assertTrue(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "qrokes")
        self.assertEqual(cpe.product, "qr_twitter_widget")
        self.assertEqual(cpe.version, ANY)
        self.assertEqual(cpe.update, ANY)
        self.assertEqual(cpe.edition, ANY)
        self.assertEqual(cpe.language, ANY)
        self.assertEqual(cpe.sw_edition, ANY)
        self.assertEqual(cpe.target_sw, "wordpress")
        self.assertEqual(cpe.target_hw, ANY)
        self.assertEqual(cpe.other, ANY)

    def test_uri_bind_examples(self):
        """Bind CPEs to the URI form using the examples from NISTIR 7695.

        The attribute values are quoted WFN strings, so characters like
        ``.`` and ``$`` must be backslash-escaped.  Raw strings are used
        so the backslashes reach the CPE class literally — a plain
        literal such as "8\\.0" contains an invalid escape sequence and
        raises a SyntaxWarning on Python 3.12+.
        """
        # test examples from https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf

        # example 1
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="microsoft",
            product="internet_explorer",
            version=r"8\.0\.6001",
            update="beta",
            edition=ANY,
        )
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:microsoft:internet_explorer:8.0.6001:beta",
        )

        # example 2: unquoted * and ? are wildcards and bind to %02/%01
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="microsoft",
            product="internet_explorer",
            version=r"8\.*",
            update="sp?",
        )
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:microsoft:internet_explorer:8.%02:sp%01",
        )

        # example 3: extended attributes are packed into the edition field
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="hp",
            product="insight_diagnostics",
            version=r"7\.4\.0\.1570",
            update=NA,
            sw_edition="online",
            target_sw="win2003",
            target_hw="x64",
        )
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:hp:insight_diagnostics:7.4.0.1570:-:~~online~win2003~x64~",
        )

        # example 4
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="hp",
            product="openview_network_manager",
            version=r"7\.51",
            target_sw="linux",
        )
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:hp:openview_network_manager:7.51::~~~linux~~",
        )

        # example 5: quoted backslash and dollar are percent-encoded
        cpe = CPE(
            part=Part.APPLICATION,
            vendor=r"foo\\bar",
            product=r"big\$money_manager_2010",
            sw_edition="special",
            target_sw="ipod_touch",
            target_hw="80gb",
        )
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:foo%5cbar:big%24money_manager_2010:::~~special~ipod_touch~80gb~",
        )

    def test_uri_unbind_examples(self):
        """Parse URI-bound CPE strings using the NISTIR 7695 examples.

        https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf
        """
        unset_attributes = (
            "language",
            "edition",
            "sw_edition",
            "target_sw",
            "target_hw",
            "other",
        )

        def assert_is_uri_bound(parsed):
            # every example below (except 7a) is a URI binding and never
            # a formatted string binding
            self.assertTrue(parsed.is_uri_binding())
            self.assertFalse(parsed.is_formatted_string_binding())

        def assert_unset(parsed, names):
            for name in names:
                self.assertIsNone(getattr(parsed, name))

        # example 1
        cpe = CPE.from_string(
            "cpe:/a:microsoft:internet_explorer:8.0.6001:beta"
        )
        assert_is_uri_bound(cpe)
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "microsoft")
        self.assertEqual(cpe.product, "internet_explorer")
        self.assertEqual(cpe.version, "8.0.6001")
        self.assertEqual(cpe.update, "beta")
        assert_unset(cpe, unset_attributes)

        # example 2: percent-encoded * and ? unbind to quoted characters
        cpe = CPE.from_string("cpe:/a:microsoft:internet_explorer:8.%2a:sp%3f")
        assert_is_uri_bound(cpe)
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "microsoft")
        self.assertEqual(cpe.product, "internet_explorer")
        self.assertEqual(cpe.version, "8.\\*")
        self.assertEqual(cpe.update, "sp\\?")
        assert_unset(cpe, unset_attributes)

        # example 3: the special codes %02/%01 unbind to unquoted * and ?
        cpe = CPE.from_string("cpe:/a:microsoft:internet_explorer:8.%02:sp%01")
        assert_is_uri_bound(cpe)
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "microsoft")
        self.assertEqual(cpe.product, "internet_explorer")
        self.assertEqual(cpe.version, "8.*")
        self.assertEqual(cpe.update, "sp?")
        assert_unset(cpe, unset_attributes)

        # example 4: the packed edition component is split into the
        # extended attributes
        cpe = CPE.from_string(
            "cpe:/a:hp:insight_diagnostics:7.4.0.1570::~~online~win2003~x64~"
        )
        assert_is_uri_bound(cpe)
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "hp")
        self.assertEqual(cpe.product, "insight_diagnostics")
        self.assertEqual(cpe.version, "7.4.0.1570")
        self.assertEqual(cpe.update, ANY)
        self.assertEqual(cpe.sw_edition, "online")
        self.assertEqual(cpe.target_sw, "win2003")
        self.assertEqual(cpe.target_hw, "x64")
        assert_unset(cpe, ("language", "edition", "other"))

        # example 5: "-" unbinds to NA
        cpe = CPE.from_string(
            "cpe:/a:hp:openview_network_manager:7.51:-:~~~linux~~"
        )
        assert_is_uri_bound(cpe)
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "hp")
        self.assertEqual(cpe.product, "openview_network_manager")
        self.assertEqual(cpe.version, "7.51")
        self.assertEqual(cpe.update, NA)
        self.assertEqual(cpe.target_sw, "linux")
        assert_unset(
            cpe, ("language", "edition", "sw_edition", "target_hw", "other")
        )

        # example 6 is intentionally disabled:
        # with self.assertRaises(CPEParsingError):
        #     CPE.from_string(
        #         "cpe:/a:foo%5cbar:big%24money_2010%07:::~~special~ipod_touch~80gb~"
        #     )

        # example 7: a percent-encoded tilde unbinds to a literal tilde
        cpe = CPE.from_string("cpe:/a:foo~bar:big%7emoney_2010")
        assert_is_uri_bound(cpe)
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "foo~bar")
        self.assertEqual(cpe.product, "big~money_2010")
        assert_unset(cpe, ("version", "update") + unset_attributes)

        # example 7a: a formatted string binding is detected as such
        cpe = CPE.from_string("cpe:2.3:*:Microsoft")
        self.assertFalse(cpe.is_uri_binding())
        self.assertTrue(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.WILDCARD)
        self.assertEqual(cpe.vendor, "microsoft")
        assert_unset(cpe, ("product", "version", "update") + unset_attributes)

        # example 8: an embedded percent-encoded asterisk is rejected
        with self.assertRaisesRegex(
            CPEParsingError,
            "^Percent-encoded asterisk is no at the beginning or the end "
            "of '12.%02.1234'$",
        ):
            CPE.from_string("cpe:/a:foo:bar:12.%02.1234")

    def test_formatted_bind_examples(self):
        """Bind CPEs to the formatted string form (NISTIR 7695 examples).

        Raw strings are used for the escaped WFN values — a plain
        literal such as "8\\.0" contains an invalid escape sequence and
        raises a SyntaxWarning on Python 3.12+.
        """
        # test examples from https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf

        # example 1
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="microsoft",
            product="internet_explorer",
            version=r"8\.0\.6001",
            update="beta",
            edition=ANY,
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            "cpe:2.3:a:microsoft:internet_explorer:8.0.6001:beta:*:*:*:*:*:*",
        )

        # example 2: unquoted wildcards stay unquoted in the binding
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="microsoft",
            product="internet_explorer",
            version=r"8\.*",
            update="sp?",
            edition=ANY,
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            "cpe:2.3:a:microsoft:internet_explorer:8.*:sp?:*:*:*:*:*:*",
        )

        # a quoted asterisk keeps its backslash in the binding
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="microsoft",
            product="internet_explorer",
            version=r"8\.\*",
            update="sp?",
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            r"cpe:2.3:a:microsoft:internet_explorer:8.\*:sp?:*:*:*:*:*:*",
        )

        # example 3
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="hp",
            product="insight",
            version=r"7\.4\.0\.1570",
            update=NA,
            sw_edition="online",
            target_sw="win2003",
            target_hw="x64",
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            "cpe:2.3:a:hp:insight:7.4.0.1570:-:*:*:online:win2003:x64:*",
        )

        # example 4
        cpe = CPE(
            part=Part.APPLICATION,
            vendor="hp",
            product="openview_network_manager",
            version=r"7\.51",
            target_sw="linux",
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            "cpe:2.3:a:hp:openview_network_manager:7.51:*:*:*:*:linux:*:*",
        )

        # example 5: quoted backslash and dollar keep their escapes
        cpe = CPE(
            part=Part.APPLICATION,
            vendor=r"foo\\bar",
            product=r"big\$money_2010",
            sw_edition="special",
            target_sw="ipod_touch",
            target_hw="80gb",
        )
        self.assertEqual(
            cpe.as_formatted_string_binding(),
            r"cpe:2.3:a:foo\\bar:big\$money_2010:*:*:*:*:special:ipod_touch:80gb:*",
        )

    def test_formatted_unbind_examples(self):
        """Parse formatted-string-bound CPEs (NISTIR 7695 examples).

        Raw strings are used for literals containing backslash escapes —
        a plain literal such as "big\\$money" contains an invalid escape
        sequence and raises a SyntaxWarning on Python 3.12+.
        """
        # test examples from https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf

        # example 1
        cpe = CPE.from_string(
            "cpe:2.3:a:microsoft:internet_explorer:8.0.6001:beta:*:*:*:*:*:*"
        )
        self.assertFalse(cpe.is_uri_binding())
        self.assertTrue(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "microsoft")
        self.assertEqual(cpe.product, "internet_explorer")
        self.assertEqual(cpe.version, "8.0.6001")
        self.assertEqual(cpe.update, "beta")
        self.assertEqual(cpe.edition, ANY)
        self.assertEqual(cpe.language, ANY)
        self.assertEqual(cpe.sw_edition, ANY)
        self.assertEqual(cpe.target_sw, ANY)
        self.assertEqual(cpe.target_hw, ANY)
        self.assertEqual(cpe.other, ANY)

        # example 2
        cpe = CPE.from_string(
            "cpe:2.3:a:microsoft:internet_explorer:8.*:sp?:*:*:*:*:*:*"
        )
        self.assertFalse(cpe.is_uri_binding())
        self.assertTrue(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "microsoft")
        self.assertEqual(cpe.product, "internet_explorer")
        self.assertEqual(cpe.version, "8.*")
        self.assertEqual(cpe.update, "sp?")
        self.assertEqual(cpe.language, ANY)
        self.assertEqual(cpe.edition, ANY)
        self.assertEqual(cpe.sw_edition, ANY)
        self.assertEqual(cpe.target_sw, ANY)
        self.assertEqual(cpe.target_hw, ANY)
        self.assertEqual(cpe.other, ANY)

        # example 3
        cpe = CPE.from_string(
            "cpe:2.3:a:hp:insight_diagnostics:7.4.0.1570:-:*:*:online:win2003:x64:*"
        )
        self.assertFalse(cpe.is_uri_binding())
        self.assertTrue(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "hp")
        self.assertEqual(cpe.product, "insight_diagnostics")
        self.assertEqual(cpe.version, "7.4.0.1570")
        self.assertEqual(cpe.update, NA)
        self.assertEqual(cpe.language, ANY)
        self.assertEqual(cpe.edition, ANY)
        self.assertEqual(cpe.sw_edition, "online")
        self.assertEqual(cpe.target_sw, "win2003")
        self.assertEqual(cpe.target_hw, "x64")
        self.assertEqual(cpe.other, ANY)

        with self.assertRaisesRegex(
            CPEParsingError,
            "^An unquoted asterisk must appear at the beginning or end of "
            "'7.4.*.1570'$",
        ):
            # embedded unquoted asterisk in the version attribute
            CPE.from_string(
                "cpe:2.3:a:hp:insight_diagnostics:7.4.*.1570:*:*:*:*:*:*"
            )

        # example 4: quoted backslash and dollar are unquoted on parsing
        cpe = CPE.from_string(
            r"cpe:2.3:a:foo\\bar:big\$money:2010:*:*:*:special:ipod_touch:80gb:*"
        )
        self.assertFalse(cpe.is_uri_binding())
        self.assertTrue(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "foo\\bar")
        self.assertEqual(cpe.product, "big$money")
        self.assertEqual(cpe.version, "2010")
        self.assertEqual(cpe.update, ANY)
        self.assertEqual(cpe.edition, ANY)
        self.assertEqual(cpe.language, ANY)
        self.assertEqual(cpe.sw_edition, "special")
        self.assertEqual(cpe.target_sw, "ipod_touch")
        self.assertEqual(cpe.target_hw, "80gb")
        self.assertEqual(cpe.other, ANY)

        # a quoted colon is allowed inside an attribute value
        cpe = CPE.from_string(r"cpe:2.3:a:foo:bar\:mumble:1.0:*:*:*:*:*:*:*")
        self.assertFalse(cpe.is_uri_binding())
        self.assertTrue(cpe.is_formatted_string_binding())
        self.assertEqual(cpe.part, Part.APPLICATION)
        self.assertEqual(cpe.vendor, "foo")
        self.assertEqual(cpe.product, "bar:mumble")
        self.assertEqual(cpe.version, "1.0")
        self.assertEqual(cpe.update, ANY)
        self.assertEqual(cpe.edition, ANY)
        self.assertEqual(cpe.language, ANY)
        self.assertEqual(cpe.sw_edition, ANY)
        self.assertEqual(cpe.target_sw, ANY)
        self.assertEqual(cpe.target_hw, ANY)
        self.assertEqual(cpe.other, ANY)

    def test_as_uri_binding(self):
        """Convert a formatted string binding to the URI binding."""
        # unquoted * and ? bind to the special URI codes %02 and %01
        cpe = CPE.from_string("cpe:2.3:a:microsoft:internet_explorer:8\\.*:sp?")
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:microsoft:internet_explorer:8.%02:sp%01",
        )

        # a quoted colon is percent-encoded; the raw string keeps the
        # backslash intact ("\:" is an invalid escape sequence in a
        # plain string literal and warns on Python 3.12+)
        cpe = CPE.from_string(r"cpe:2.3:a:cgiirc:cgi\:irc:0.5.7:*:*:*:*:*:*:*")
        self.assertEqual(
            cpe.as_uri_binding(),
            "cpe:/a:cgiirc:cgi%3airc:0.5.7",
        )

    def test_as_uri_binding_with_edition(self):
        """Extended attributes are packed into the URI edition component."""
        parsed = CPE.from_string(
            "cpe:2.3:a:hp:insight_diagnostics:7.4.0.1570:-:*:*:online:win2003:x64"
        )
        uri = parsed.as_uri_binding()
        self.assertEqual(
            uri,
            "cpe:/a:hp:insight_diagnostics:7.4.0.1570:-:~~online~win2003~x64~",
        )

    def test_parse_error(self):
        """Strings without a known CPE prefix are rejected."""
        expected_message = (
            "^Invalid CPE string 'foo/bar'. CPE does not start with 'cpe:/' "
            "or 'cpe:2.3'$"
        )
        with self.assertRaisesRegex(CPEParsingError, expected_message):
            CPE.from_string("foo/bar")

    def test_str(self):
        """str() of a CPE yields a binding string."""
        # without extended attributes the URI binding is produced
        simple = CPE(part=Part.APPLICATION, vendor="foo", product="bar")
        self.assertEqual(str(simple), "cpe:/a:foo:bar")

        # an extended attribute produces the formatted string binding
        extended = CPE(
            part=Part.APPLICATION,
            vendor="foo",
            product="bar",
            target_sw="ipsum",
        )
        self.assertEqual(str(extended), "cpe:2.3:a:foo:bar:*:*:*:*:*:ipsum:*:*")

        # an explicitly passed cpe_string is returned as-is
        explicit = CPE(
            cpe_string="cpe:2.3:a:foo:bar",
            part=Part.APPLICATION,
            vendor="foo",
            product="bar",
        )
        self.assertEqual(str(explicit), "cpe:2.3:a:foo:bar")

    def test_has_extended_attribute(self):
        """has_extended_attribute() reflects whether an extended field is set."""
        plain = CPE(part=Part.APPLICATION, vendor="foo", product="bar")
        extended = CPE(
            part=Part.APPLICATION,
            vendor="foo",
            product="bar",
            target_sw="ipsum",
        )

        self.assertFalse(plain.has_extended_attribute())
        self.assertTrue(extended.has_extended_attribute())

    def test_clone(self):
        """clone() copies a CPE and can override single attributes."""
        cpe_string = (
            "cpe:2.3:a:hp:openview_network_manager:7.51:*:*:*:*:linux:*:*"
        )

        original = CPE.from_string(cpe_string)
        copy = original.clone()
        self.assertIsNot(original, copy)

        original = CPE.from_string(cpe_string)
        modified = original.clone(version=ANY)
        self.assertIsNot(original, modified)
        # the source object is untouched, only the clone gets the override
        self.assertEqual(original.version, "7.51")
        self.assertEqual(modified.version, ANY)

    def test_equal(self):
        """CPEs compare equal by value, not by identity."""
        first = CPE.from_string("cpe:2.3:a:3com:3cdaemon:-:*:*:*:*:*:*:*")
        different = CPE.from_string(
            "cpe:2.3:a:adobe:flash_player:-:*:*:*:*:*:*:*"
        )
        same_as_first = CPE.from_string(
            "cpe:2.3:a:3com:3cdaemon:-:*:*:*:*:*:*:*"
        )

        # comparing against unrelated values must not raise
        self.assertNotEqual(first, None)
        self.assertNotEqual(first, "foo")
        self.assertNotEqual(first, different)
        self.assertIsNot(first, same_as_first)
        self.assertEqual(first, same_as_first)

    def test_hashable(self):
        """CPEs are usable in sets: equal CPEs collapse to one entry.

        The previous version used ``assertTrue(len(...), n)``, which can
        never fail — the second argument of assertTrue is only the
        failure message.  assertEqual performs the intended comparison.
        """
        cpe1 = CPE.from_string("cpe:2.3:a:3com:3cdaemon:-:*:*:*:*:*:*:*")
        cpe2 = CPE.from_string("cpe:2.3:a:adobe:flash_player:-:*:*:*:*:*:*:*")
        cpe3 = CPE.from_string("cpe:2.3:a:3com:3cdaemon:-:*:*:*:*:*:*:*")

        cpe_list = [cpe1, cpe2, cpe3, cpe1]
        self.assertEqual(len(cpe_list), 4)

        # cpe1 == cpe3 and cpe1 appears twice, so only two distinct members
        cpe_set = set(cpe_list)
        self.assertEqual(len(cpe_set), 2)

    def test_repr(self):
        # repr() should yield a developer-facing representation of the CPE
        cpe1 = CPE.from_string("cpe:2.3:a:3com:3cdaemon:-:*:*:*:*:*:*:*")

        # NOTE(review): the expected value below is an empty string, but
        # repr() can never return '' for an object — this looks like the
        # original "<CPE ...>" literal was lost (e.g. stripped as markup).
        # Confirm against CPE.__repr__ and restore the real expected string.
        self.assertEqual(
            repr(cpe1),
            '',
        )
pontos-25.3.2/tests/fake_pyproject.toml000066400000000000000000000001051476255566300202100ustar00rootroot00000000000000[tool.pontos.version]
version-module-file = "foo/bar/__version__.py"
pontos-25.3.2/tests/git/000077500000000000000000000000001476255566300150755ustar00rootroot00000000000000pontos-25.3.2/tests/git/__init__.py000066400000000000000000000001411476255566300172020ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/git/test_git.py000066400000000000000000001017421476255566300172760ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from pathlib import Path
from unittest.mock import MagicMock, patch

from pontos.git import (
    DEFAULT_TAG_SORT_SUFFIX,
    ConfigScope,
    Git,
    GitError,
    MergeStrategy,
    ResetMode,
    Status,
    TagSort,
)
from pontos.testing import temp_directory, temp_git_repository


class GitTestCase(unittest.TestCase):
    # The tests below patch pontos.git._git.exec_git and only verify which
    # command line arguments Git builds; no real git process is spawned.

    @patch("pontos.git._git.exec_git")
    def test_exec(self, exec_git_mock):
        git = Git()
        git.exec("foo", "bar", "baz")

        exec_git_mock.assert_called_once_with("foo", "bar", "baz", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_clone(self, exec_git_mock):
        git = Git()
        git.clone("http://foo/foo.git", Path("/bar"))

        # the Path destination must be passed as a plain string
        exec_git_mock.assert_called_once_with(
            "clone", "http://foo/foo.git", "/bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_clone_with_remote(self, exec_git_mock):
        git = Git()
        git.clone("http://foo/foo.git", Path("/bar"), remote="foo")

        exec_git_mock.assert_called_once_with(
            "clone", "-o", "foo", "http://foo/foo.git", "/bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_clone_with_branch(self, exec_git_mock):
        git = Git()
        git.clone("http://foo/foo.git", Path("/bar"), branch="foo")

        exec_git_mock.assert_called_once_with(
            "clone", "-b", "foo", "http://foo/foo.git", "/bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_clone_with_depth(self, exec_git_mock):
        git = Git()
        git.clone("http://foo/foo.git", Path("/bar"), depth=1)

        # the integer depth must be converted to a string argument
        exec_git_mock.assert_called_once_with(
            "clone", "--depth", "1", "http://foo/foo.git", "/bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_init(self, exec_git_mock):
        git = Git()
        git.init()

        exec_git_mock.assert_called_once_with("init", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_init_bare(self, exec_git_mock):
        git = Git()
        git.init(bare=True)

        exec_git_mock.assert_called_once_with("init", "--bare", cwd=None)

    def test_cwd(self):
        """The cwd property is unset by default and absolutized on assign."""
        git = Git()
        self.assertIsNone(git.cwd)

        working_dir = Path("foo")
        git.cwd = working_dir
        # the setter stores the absolute path, not the value passed in
        self.assertEqual(git.cwd, working_dir.absolute())

    # Branch creation and rebase argument construction (exec_git mocked).

    @patch("pontos.git._git.exec_git")
    def test_create_branch(self, exec_git_mock):
        git = Git()
        git.create_branch("foo")

        exec_git_mock.assert_called_once_with("checkout", "-b", "foo", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_create_branch_with_starting_point(self, exec_git_mock):
        git = Git()
        git.create_branch("foo", start_point="bar")

        exec_git_mock.assert_called_once_with(
            "checkout", "-b", "foo", "bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_rebase(self, exec_git_mock):
        git = Git()
        git.rebase("foo")

        exec_git_mock.assert_called_once_with("rebase", "foo", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_rebase_with_head(self, exec_git_mock):
        git = Git()
        git.rebase("foo", head="bar")

        exec_git_mock.assert_called_once_with("rebase", "foo", "bar", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_rebase_with_head_and_onto(self, exec_git_mock):
        git = Git()
        git.rebase("foo", head="bar", onto="staging")

        exec_git_mock.assert_called_once_with(
            "rebase", "--onto", "staging", "foo", "bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_rebase_with_octopus_strategy(self, exec_git_mock):
        git = Git()
        git.rebase("foo", strategy=MergeStrategy.OCTOPUS)

        exec_git_mock.assert_called_once_with(
            "rebase", "--strategy", "octopus", "foo", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_rebase_with_ort_ours_strategy(self, exec_git_mock):
        git = Git()
        git.rebase("foo", strategy=MergeStrategy.ORT_OURS)

        # ORT_OURS expands to the ort strategy plus the "-X ours" option
        exec_git_mock.assert_called_once_with(
            "rebase", "--strategy", "ort", "-X", "ours", "foo", cwd=None
        )

    # Push argument construction: remotes, branches, refspecs, flags
    # (exec_git mocked).

    @patch("pontos.git._git.exec_git")
    def test_push(self, exec_git_mock):
        git = Git()
        git.push()

        exec_git_mock.assert_called_once_with("push", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_with_remote(self, exec_git_mock):
        git = Git()
        git.push(remote="foo")

        exec_git_mock.assert_called_once_with("push", "foo", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_with_remote_and_branch(self, exec_git_mock):
        git = Git()
        git.push(remote="foo", branch="bar")

        exec_git_mock.assert_called_once_with("push", "foo", "bar", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_with_follow_tags(self, exec_git_mock):
        git = Git()
        git.push(follow_tags=True)

        exec_git_mock.assert_called_once_with("push", "--follow-tags", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_with_follow_tags_false(self, exec_git_mock):
        git = Git()
        git.push(follow_tags=False)

        # follow_tags=False must not add any flag at all
        exec_git_mock.assert_called_once_with("push", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_with_force(self, exec_git_mock):
        git = Git()
        git.push(force=True)

        exec_git_mock.assert_called_once_with("push", "--force", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_with_force_false(self, exec_git_mock):
        git = Git()
        git.push(force=False)

        # force=False must not add any flag at all
        exec_git_mock.assert_called_once_with("push", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_branch_with_delete(self, exec_git_mock):
        git = Git()
        git.push(delete=True, branch="v1.2.3", remote="origin")

        exec_git_mock.assert_called_once_with(
            "push", "--delete", "origin", "v1.2.3", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_push_refspec_with_delete(self, exec_git_mock):
        git = Git()
        git.push("v1.2.3", delete=True)

        exec_git_mock.assert_called_once_with(
            "push", "--delete", "v1.2.3", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_push_refspec(self, exec_git_mock):
        git = Git()
        git.push("v1.2.3")

        exec_git_mock.assert_called_once_with("push", "v1.2.3", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_push_refspecs(self, exec_git_mock):
        # a list of refspecs is flattened into individual arguments
        git = Git()
        git.push(["v1.2.3", "main"])

        exec_git_mock.assert_called_once_with(
            "push", "v1.2.3", "main", cwd=None
        )

    # Config get/set across scopes and cherry-pick (exec_git mocked).

    @patch("pontos.git._git.exec_git")
    def test_config_get(self, exec_git_mock):
        git = Git()
        git.config("foo")

        exec_git_mock.assert_called_once_with("config", "foo", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_config_set(self, exec_git_mock):
        git = Git()
        git.config("foo", "bar")

        exec_git_mock.assert_called_once_with("config", "foo", "bar", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_config_get_local_scope(self, exec_git_mock):
        git = Git()
        git.config("foo", scope=ConfigScope.LOCAL)

        exec_git_mock.assert_called_once_with(
            "config", "--local", "foo", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_config_get_system_scope(self, exec_git_mock):
        git = Git()
        git.config("foo", scope=ConfigScope.SYSTEM)

        exec_git_mock.assert_called_once_with(
            "config", "--system", "foo", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_config_get_global_scope(self, exec_git_mock):
        git = Git()
        git.config("foo", scope=ConfigScope.GLOBAL)

        exec_git_mock.assert_called_once_with(
            "config", "--global", "foo", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_config_get_worktree_scope(self, exec_git_mock):
        git = Git()
        git.config("foo", scope=ConfigScope.WORKTREE)

        exec_git_mock.assert_called_once_with(
            "config", "--worktree", "foo", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_cherry_pick(self, exec_git_mock):
        # a list of commits is flattened into individual arguments
        git = Git()
        git.cherry_pick(["foo", "bar"])

        exec_git_mock.assert_called_once_with(
            "cherry-pick", "foo", "bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_cherry_pick_single_commit(self, exec_git_mock):
        # a single commit may be passed as a plain string
        git = Git()
        git.cherry_pick("foo")

        exec_git_mock.assert_called_once_with("cherry-pick", "foo", cwd=None)

    # Tag listing: the mocked exec_git output is split into a tag list and
    # the sorting options are translated into git arguments.

    @patch("pontos.git._git.exec_git")
    def test_list_tags(self, exec_git_mock):
        exec_git_mock.return_value = "v1.0\nv2.0\nv2.1\n"
        git = Git()
        tags = git.list_tags()

        exec_git_mock.assert_called_once_with("tag", "-l", cwd=None)

        # the trailing newline must not produce an empty fourth entry
        self.assertEqual(len(tags), 3)
        self.assertEqual(tags[0], "v1.0")
        self.assertEqual(tags[1], "v2.0")
        self.assertEqual(tags[2], "v2.1")

    @patch("pontos.git._git.exec_git")
    def test_list_tags_with_version_sort(self, exec_git_mock):
        exec_git_mock.return_value = "v1.0\nv2.0\nv2.1\n"
        git = Git()
        tags = git.list_tags(sort=TagSort.VERSION)

        exec_git_mock.assert_called_once_with(
            "tag",
            "-l",
            "--sort=version:refname",
            cwd=None,
        )

        self.assertEqual(len(tags), 3)
        self.assertEqual(tags[0], "v1.0")
        self.assertEqual(tags[1], "v2.0")
        self.assertEqual(tags[2], "v2.1")

    @patch("pontos.git._git.exec_git")
    def test_list_tags_with_version_suffix_sort(self, exec_git_mock):
        exec_git_mock.return_value = "v1.0\nv2.0\nv2.1\n"
        git = Git()
        tags = git.list_tags(
            sort=TagSort.VERSION, sort_suffix=DEFAULT_TAG_SORT_SUFFIX
        )

        # every suffix becomes its own "-c versionsort.suffix=..." pair
        # placed before the tag sub-command
        exec_git_mock.assert_called_once_with(
            "-c",
            "versionsort.suffix=-alpha",
            "-c",
            "versionsort.suffix=a",
            "-c",
            "versionsort.suffix=-beta",
            "-c",
            "versionsort.suffix=b",
            "-c",
            "versionsort.suffix=-rc",
            "-c",
            "versionsort.suffix=rc",
            "tag",
            "-l",
            "--sort=version:refname",
            cwd=None,
        )

        self.assertEqual(len(tags), 3)
        self.assertEqual(tags[0], "v1.0")
        self.assertEqual(tags[1], "v2.0")
        self.assertEqual(tags[2], "v2.1")

    @patch("pontos.git._git.exec_git")
    def test_list_tags_with_tag_name(self, exec_git_mock):
        exec_git_mock.return_value = "v2.0\nv2.1\n"
        git = Git()
        tags = git.list_tags(tag_name="v2.*")

        # the tag name pattern is forwarded to git for filtering
        exec_git_mock.assert_called_once_with("tag", "-l", "v2.*", cwd=None)

        self.assertEqual(len(tags), 2)
        self.assertEqual(tags[0], "v2.0")
        self.assertEqual(tags[1], "v2.1")

    # Staging and commit argument construction (exec_git mocked).

    @patch("pontos.git._git.exec_git")
    def test_add_single_file(self, exec_git_mock):
        git = Git()
        git.add("foo")

        exec_git_mock.assert_called_once_with("add", "foo", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_add(self, exec_git_mock):
        # a list of files is flattened into individual arguments
        git = Git()
        git.add(["foo", "bar"])

        exec_git_mock.assert_called_once_with("add", "foo", "bar", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_commit(self, exec_git_mock):
        git = Git()
        git.commit("Add foo")

        exec_git_mock.assert_called_once_with(
            "commit", "-m", "Add foo", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_commit_with_signing_key(self, exec_git_mock):
        git = Git()
        git.commit(
            "Add foo",
            gpg_signing_key="8AE4BE429B60A59B311C2E739823FAA60ED1E580",
        )

        # the key id is appended directly to the -S flag (no space)
        exec_git_mock.assert_called_once_with(
            "commit",
            "-S8AE4BE429B60A59B311C2E739823FAA60ED1E580",
            "-m",
            "Add foo",
            cwd=None,
        )

    @patch("pontos.git._git.exec_git")
    def test_commit_without_verify(self, exec_git_mock):
        git = Git()
        git.commit("Add foo", verify=False)

        exec_git_mock.assert_called_once_with(
            "commit", "--no-verify", "-m", "Add foo", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_commit_without_gpg_sign(self, exec_git_mock):
        git = Git()
        git.commit("Add foo", gpg_sign=False)

        exec_git_mock.assert_called_once_with(
            "commit", "--no-gpg-sign", "-m", "Add foo", cwd=None
        )

    # Tag creation argument construction (exec_git mocked).

    @patch("pontos.git._git.exec_git")
    def test_tag(self, exec_git_mock):
        git = Git()
        git.tag(tag="test")

        exec_git_mock.assert_called_once_with("tag", "test", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_tag_with_gpg_key(self, exec_git_mock):
        git = Git()
        git.tag("test", gpg_key_id="0x123")

        exec_git_mock.assert_called_once_with(
            "tag", "-u", "0x123", "test", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_tag_with_message(self, exec_git_mock):
        git = Git()
        git.tag("test", message="Tag for 123 release")

        exec_git_mock.assert_called_once_with(
            "tag", "-m", "Tag for 123 release", "test", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_tag_with_force(self, exec_git_mock):
        git = Git()
        git.tag("test", force=True)

        exec_git_mock.assert_called_once_with(
            "tag", "--force", "test", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_tag_without_sign(self, exec_git_mock):
        git = Git()
        git.tag("test", sign=False)

        exec_git_mock.assert_called_once_with(
            "tag", "--no-sign", "test", cwd=None
        )

    # Fetch and remote management argument construction (exec_git mocked).

    @patch("pontos.git._git.exec_git")
    def test_fetch(self, exec_git_mock):
        git = Git()
        git.fetch()

        exec_git_mock.assert_called_once_with("fetch", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_fetch_with_remote(self, exec_git_mock):
        git = Git()
        git.fetch("foo")

        exec_git_mock.assert_called_once_with("fetch", "foo", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_fetch_with_remote_and_refspec(self, exec_git_mock):
        git = Git()
        git.fetch("foo", "my-branch")

        exec_git_mock.assert_called_once_with(
            "fetch", "foo", "my-branch", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_fetch_with_verbose(self, exec_git_mock):
        git = Git()
        git.fetch(verbose=True)

        exec_git_mock.assert_called_once_with("fetch", "-v", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_add_remote(self, exec_git_mock):
        git = Git()
        git.add_remote("foo", "https://foo.bar/foo.git")

        exec_git_mock.assert_called_once_with(
            "remote", "add", "foo", "https://foo.bar/foo.git", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_checkout(self, exec_git_mock):
        git = Git()
        git.checkout("foo")

        exec_git_mock.assert_called_once_with("checkout", "foo", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_checkout_with_start_point(self, exec_git_mock):
        git = Git()
        git.checkout("foo", start_point="bar")

        exec_git_mock.assert_called_once_with(
            "checkout", "-b", "foo", "bar", cwd=None
        )

    @patch("pontos.git._git.exec_git")
    def test_remote_url(self, exec_git_mock):
        url = "git@github.com:foo/foo.git"
        exec_git_mock.return_value = url

        git = Git()
        remote = git.remote_url("foo")

        exec_git_mock.assert_called_once_with(
            "remote", "get-url", "foo", cwd=None
        )

        self.assertEqual(remote, url)

    @patch("pontos.git._git.exec_git")
    def test_remote_url_with_default(self, exec_git_mock):
        url = "git@github.com:foo/foo.git"
        exec_git_mock.return_value = url

        git = Git()
        remote = git.remote_url()

        exec_git_mock.assert_called_once_with(
            "remote", "get-url", "origin", cwd=None
        )

        self.assertEqual(remote, url)

    @patch("pontos.git._git.exec_git")
    def test_log(self, exec_git_mock):
        # pylint: disable=line-too-long
        exec_git_mock.return_value = """commit 68c6c3785bbb049df63dc51f8b5b709eb19f8517
Author: Björn Ricks 
Date:   Wed Apr 8 15:16:05 2020 +0200

    Add a draft for a README.md document

commit 464f24d43d7293091b168c6b37ee37978a650958
Author: Björn Ricks 
Date:   Wed Apr 8 14:28:53 2020 +0200

    Initial commit
"""  # noqa: E501

        git = Git()
        logs = git.log()

        exec_git_mock.assert_called_once_with("log", cwd=None)

        self.assertEqual(
            logs[0], "commit 68c6c3785bbb049df63dc51f8b5b709eb19f8517"
        )
        self.assertEqual(
            logs[6], "commit 464f24d43d7293091b168c6b37ee37978a650958"
        )

    @patch("pontos.git._git.exec_git")
    def test_log_with_oneline(self, exec_git_mock):
        exec_git_mock.return_value = """50f9963 Add CircleCI config for pontos
9a8feaa Rename to pontos only
047cfae Update README for installation and development
e6ea80d Update README
68c6c37 Add a draft for a README.md document
464f24d Initial commit"""

        git = Git()
        logs = git.log(oneline=True)

        exec_git_mock.assert_called_once_with("log", "--oneline", cwd=None)

        self.assertEqual(logs[0], "50f9963 Add CircleCI config for pontos")
        self.assertEqual(logs[5], "464f24d Initial commit")

    @patch("pontos.git._git.exec_git")
    def test_log_with_format(self, exec_git_mock):
        exec_git_mock.return_value = """Add CircleCI config for pontos
Rename to pontos only
Update README for installation and development
Update README
Add a draft for a README.md document
Initial commit"""

        git = Git()
        logs = git.log(format="format:%s")

        exec_git_mock.assert_called_once_with(
            "log", "--format=format:%s", cwd=None
        )

        self.assertEqual(logs[0], "Add CircleCI config for pontos")
        self.assertEqual(logs[5], "Initial commit")

    @patch("pontos.git._git.exec_git")
    def test_rev_list(self, exec_git_mock):
        git = Git()
        git.rev_list("foo", "bar", "baz", max_parents=123, abbrev_commit=True)

        exec_git_mock.assert_called_once_with(
            "rev-list",
            "--max-parents=123",
            "--abbrev-commit",
            "foo",
            "bar",
            "baz",
            cwd=None,
        )

    @patch("pontos.git._git.exec_git")
    def test_move(self, exec_git_mock):
        git = Git()
        git.move("foo", "bar")

        exec_git_mock.assert_called_once_with(
            "mv",
            "foo",
            "bar",
            cwd=None,
        )

    @patch("pontos.git._git.exec_git")
    def test_remove(self, exec_git_mock):
        git = Git()
        git.remove("foo")

        exec_git_mock.assert_called_once_with(
            "rm",
            "foo",
            cwd=None,
        )

    @patch("pontos.git._git.exec_git")
    def test_status(self, exec_git_mock):
        git = Git()
        git.status()

        exec_git_mock.assert_called_once_with(
            "status",
            "-z",
            "--ignore-submodules",
            "--untracked-files=no",
            cwd=None,
        )

    @patch("pontos.git._git.exec_git")
    def test_status_with_files(self, exec_git_mock):
        git = Git()
        git.status(["foo", "bar", "baz"])

        exec_git_mock.assert_called_once_with(
            "status",
            "-z",
            "--ignore-submodules",
            "--untracked-files=no",
            "--",
            "foo",
            "bar",
            "baz",
            cwd=None,
        )

    @patch("pontos.git._git.exec_git")
    def test_version(self, exec_git_mock: MagicMock):
        exec_git_mock.return_value = "git version 1.2.3"
        git = Git()
        self.assertEqual(git.version, "1.2.3")

    def test_version_runs(self):
        """Getting the git version should not raise an error"""
        git = Git()
        git.version

    @patch("pontos.git._git.exec_git")
    def test_show_with_online_and_objects(self, exec_git_mock: MagicMock):
        exec_git_mock.return_value = """9a8feaa Rename to pontos only
047cfae Update README for installation and development
"""

        git = Git()
        show = git.show(oneline=True, objects=["9a8feaa", "047cfae"])

        exec_git_mock.assert_called_once_with(
            "show", "--oneline", "9a8feaa", "047cfae", cwd=None
        )

        self.assertEqual(show[0], "9a8feaa Rename to pontos only")
        self.assertEqual(
            show[1], "047cfae Update README for installation and development"
        )

    @patch("pontos.git._git.exec_git")
    def test_show_with_format(self, exec_git_mock: MagicMock):
        exec_git_mock.return_value = """Rename to pontos only
"""

        git = Git()
        show = git.show(format="format:%s")

        exec_git_mock.assert_called_once_with(
            "show", "--format=format:%s", cwd=None
        )

        self.assertEqual(show, "Rename to pontos only")

    @patch("pontos.git._git.exec_git")
    def test_show_with_single_object(self, exec_git_mock: MagicMock):
        content = """commit a6956fb1398cae9426e7ced0396248a90dc1ff64
Author: Björn Ricks 
Date:   Wed Jul 19 15:07:03 2023 +0200

    Add: Allow to get the git version string

diff --git a/pontos.git._git.py b/pontos.git._git.py
index a83eed8..09aed3d 100644
--- a/pontos.git._git.py
+++ b/pontos.git._git.py
@@ -168,6 +168,14 @@ class Git:
...
"""
        exec_git_mock.return_value = content

        git = Git()
        show = git.show(objects="9a8feaa")

        exec_git_mock.assert_called_once_with("show", "9a8feaa", cwd=None)

        self.assertEqual(show, content.strip())

    @patch("pontos.git._git.exec_git")
    def test_show_with_patch(self, exec_git_mock: MagicMock):
        content = """commit a6956fb1398cae9426e7ced0396248a90dc1ff64
Author: Björn Ricks 
Date:   Wed Jul 19 15:07:03 2023 +0200

    Add: Allow to get the git version string

diff --git a/pontos.git._git.py b/pontos.git._git.py
index a83eed8..09aed3d 100644
--- a/pontos.git._git.py
+++ b/pontos.git._git.py
@@ -168,6 +168,14 @@ class Git:
...
"""
        exec_git_mock.return_value = content

        git = Git()
        show = git.show(patch=True)

        exec_git_mock.assert_called_once_with("show", "--patch", cwd=None)

        self.assertEqual(show, content.strip())

    @patch("pontos.git._git.exec_git")
    def test_show_with_no_patch(self, exec_git_mock: MagicMock):
        content = """commit a6956fb1398cae9426e7ced0396248a90dc1ff64
Author: Björn Ricks 
Date:   Wed Jul 19 15:07:03 2023 +0200

    Add: Allow to get the git version string
"""
        exec_git_mock.return_value = content

        git = Git()
        show = git.show(patch=False)

        exec_git_mock.assert_called_once_with("show", "--no-patch", cwd=None)

        self.assertEqual(show, content.strip())

    @patch("pontos.git._git.exec_git")
    def test_delete_tag(self, exec_git_mock):
        git = Git()
        git.delete_tag("v1.2.3")

        exec_git_mock.assert_called_once_with("tag", "-d", "v1.2.3", cwd=None)

    @patch("pontos.git._git.exec_git")
    def test_reset_mixed(self, exec_git_mock):
        git = Git()
        git.reset("c1234", mode=ResetMode.MIXED)

        exec_git_mock.assert_called_once_with(
            "reset", "--mixed", "c1234", cwd=None
        )


class GitExtendedTestCase(unittest.TestCase):
    def test_semantic_list_tags(self):
        with temp_git_repository() as tmp_git:
            tags = [
                "v0.6.5-alpha3",
                "v0.6.5-beta1",
                "v0.6.5-alpha2",
                "v0.6.5-rc1",
                "v0.6.5-alpha1",
                "v0.6.5",
                "v0.6.4",
                "v0.6.3-alpha1",
                "v1.0.0",
            ]
            git = Git(tmp_git)
            git.config("commit.gpgSign", "false", scope=ConfigScope.LOCAL)
            git.config("tag.gpgSign", "false", scope=ConfigScope.LOCAL)
            git.config("tag.sort", "refname", scope=ConfigScope.LOCAL)

            tmp_file = tmp_git / "foo.txt"
            tmp_file.touch()

            git.add(tmp_file)

            git.commit("some commit")

            for tag in tags:
                git.tag(tag)

            tags = git.list_tags()
            self.assertEqual(
                tags,
                [
                    "v0.6.3-alpha1",
                    "v0.6.4",
                    "v0.6.5",
                    "v0.6.5-alpha1",
                    "v0.6.5-alpha2",
                    "v0.6.5-alpha3",
                    "v0.6.5-beta1",
                    "v0.6.5-rc1",
                    "v1.0.0",
                ],
            )

            tags = git.list_tags(sort=TagSort.VERSION)
            self.assertEqual(
                tags,
                [
                    "v0.6.3-alpha1",
                    "v0.6.4",
                    "v0.6.5",
                    "v0.6.5-alpha1",
                    "v0.6.5-alpha2",
                    "v0.6.5-alpha3",
                    "v0.6.5-beta1",
                    "v0.6.5-rc1",
                    "v1.0.0",
                ],
            )

            tags = git.list_tags(sort=TagSort.VERSION, tag_name="v0.6*")
            self.assertEqual(
                tags,
                [
                    "v0.6.3-alpha1",
                    "v0.6.4",
                    "v0.6.5",
                    "v0.6.5-alpha1",
                    "v0.6.5-alpha2",
                    "v0.6.5-alpha3",
                    "v0.6.5-beta1",
                    "v0.6.5-rc1",
                ],
            )

            tags = git.list_tags(sort=TagSort.VERSION, tag_name="v0.6.5*")
            self.assertEqual(
                tags,
                [
                    "v0.6.5",
                    "v0.6.5-alpha1",
                    "v0.6.5-alpha2",
                    "v0.6.5-alpha3",
                    "v0.6.5-beta1",
                    "v0.6.5-rc1",
                ],
            )

            tags = git.list_tags(tag_name="v0.6.5*")
            self.assertEqual(
                tags,
                [
                    "v0.6.5",
                    "v0.6.5-alpha1",
                    "v0.6.5-alpha2",
                    "v0.6.5-alpha3",
                    "v0.6.5-beta1",
                    "v0.6.5-rc1",
                ],
            )

            tags = git.list_tags(
                sort=TagSort.VERSION, sort_suffix=DEFAULT_TAG_SORT_SUFFIX
            )
            self.assertEqual(
                tags,
                [
                    "v0.6.3-alpha1",
                    "v0.6.4",
                    "v0.6.5-alpha1",
                    "v0.6.5-alpha2",
                    "v0.6.5-alpha3",
                    "v0.6.5-beta1",
                    "v0.6.5-rc1",
                    "v0.6.5",
                    "v1.0.0",
                ],
            )

    def test_pep440_list_tags(self):
        with temp_git_repository() as tmp_git:
            tags = [
                "v0.6.5a3",
                "v0.6.5b1",
                "v0.6.5a2",
                "v0.6.5rc1",
                "v0.6.5a1",
                "v0.6.5",
                "v0.6.4",
                "v0.6.3a1",
                "v1.0.0",
            ]
            git = Git(tmp_git)
            git.config("commit.gpgSign", "false", scope=ConfigScope.LOCAL)
            git.config("tag.gpgSign", "false", scope=ConfigScope.LOCAL)
            git.config("tag.sort", "refname", scope=ConfigScope.LOCAL)

            tmp_file = tmp_git / "foo.txt"
            tmp_file.touch()

            git.add(tmp_file)

            git.commit("some commit")

            for tag in tags:
                git.tag(tag)

            tags = git.list_tags()
            self.assertEqual(
                tags,
                [
                    "v0.6.3a1",
                    "v0.6.4",
                    "v0.6.5",
                    "v0.6.5a1",
                    "v0.6.5a2",
                    "v0.6.5a3",
                    "v0.6.5b1",
                    "v0.6.5rc1",
                    "v1.0.0",
                ],
            )

            tags = git.list_tags(sort=TagSort.VERSION)
            self.assertEqual(
                tags,
                [
                    "v0.6.3a1",
                    "v0.6.4",
                    "v0.6.5",
                    "v0.6.5a1",
                    "v0.6.5a2",
                    "v0.6.5a3",
                    "v0.6.5b1",
                    "v0.6.5rc1",
                    "v1.0.0",
                ],
            )

            tags = git.list_tags(
                sort=TagSort.VERSION, sort_suffix=DEFAULT_TAG_SORT_SUFFIX
            )
            self.assertEqual(
                tags,
                [
                    "v0.6.3a1",
                    "v0.6.4",
                    "v0.6.5a1",
                    "v0.6.5a2",
                    "v0.6.5a3",
                    "v0.6.5b1",
                    "v0.6.5rc1",
                    "v0.6.5",
                    "v1.0.0",
                ],
            )

    def test_git_status(self):
        with temp_git_repository() as tmp_git:
            tracked_file = tmp_git / "foo.json"
            tracked_file.write_text("sed diam nonumy eirmod", encoding="utf8")
            changed_file = tmp_git / "bar.json"
            changed_file.touch()
            staged_changed_file = tmp_git / "ipsum.json"
            staged_changed_file.write_text("tempor invidunt ut labore")
            removed_file = tmp_git / "lorem.json"
            removed_file.write_text(
                "consetetur sadipscing elitr", encoding="utf8"
            )
            renamed_file = tmp_git / "foo.md"
            renamed_file.write_text(
                "et dolore magna aliquyam erat", encoding="utf8"
            )

            git = Git(tmp_git)
            git.config("commit.gpgSign", "false", scope=ConfigScope.LOCAL)
            git.add(
                [
                    tracked_file,
                    changed_file,
                    staged_changed_file,
                    removed_file,
                    renamed_file,
                ]
            )
            git.commit("Some commit")

            changed_file.write_text("Lorem Ipsum", encoding="utf8")
            staged_changed_file.write_text("Lorem Ipsum", encoding="utf8")

            added_file = tmp_git / "foo.txt"
            added_file.touch()

            added_modified_file = tmp_git / "ipsum.txt"
            added_modified_file.touch()

            git.add([added_file, staged_changed_file, added_modified_file])

            staged_changed_file.write_text("Dolor sit", encoding="utf8")

            added_modified_file.write_text("Lorem Ipsum", encoding="utf8")

            git.move(renamed_file, "foo.rst")
            git.remove(removed_file)

            untracked_file = tmp_git / "bar.txt"
            untracked_file.touch()

            it = git.status()

            status = next(it)
            self.assertEqual(status.index, Status.UNMODIFIED)
            self.assertEqual(status.working_tree, Status.MODIFIED)
            self.assertEqual(status.path, Path("bar.json"))

            status = next(it)
            self.assertEqual(status.index, Status.RENAMED)
            self.assertEqual(status.working_tree, Status.UNMODIFIED)
            self.assertEqual(status.path, Path("foo.rst"))
            self.assertEqual(status.old_path, Path("foo.md"))

            status = next(it)
            self.assertEqual(status.index, Status.ADDED)
            self.assertEqual(status.working_tree, Status.UNMODIFIED)
            self.assertEqual(status.path, Path("foo.txt"))

            status = next(it)
            self.assertEqual(status.index, Status.MODIFIED)
            self.assertEqual(status.working_tree, Status.MODIFIED)
            self.assertEqual(status.path, Path("ipsum.json"))

            status = next(it)
            self.assertEqual(status.index, Status.ADDED)
            self.assertEqual(status.working_tree, Status.MODIFIED)
            self.assertEqual(status.path, Path("ipsum.txt"))

            status = next(it)
            self.assertEqual(status.index, Status.DELETED)
            self.assertEqual(status.working_tree, Status.UNMODIFIED)
            self.assertEqual(status.path, Path("lorem.json"))

            with self.assertRaises(StopIteration):
                next(it)

    def test_git_error(self):
        with (
            temp_directory(change_into=True),
            self.assertRaises(GitError) as cm,
        ):
            Git().log()

        self.assertEqual(cm.exception.returncode, 128)
        self.assertEqual(
            cm.exception.stderr,
            "fatal: not a git repository (or any of the parent directories): "
            ".git\n",
        )
        self.assertEqual(cm.exception.stdout, "")
        self.assertEqual(cm.exception.cmd, ["git", "log"])
pontos-25.3.2/tests/git/test_status.py000066400000000000000000000067761476255566300200510ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import os
import unittest
from pathlib import Path

from pontos.git._status import Status, StatusEntry, parse_git_status


class StatusEntryTestCase(unittest.TestCase):
    def test_parse_modified_modified(self):
        status = StatusEntry("MM foo.txt")

        self.assertEqual(status.index, Status.MODIFIED)
        self.assertEqual(status.working_tree, Status.MODIFIED)
        self.assertEqual(status.path, Path("foo.txt"))

    def test_parse_modified_unmodified(self):
        status = StatusEntry("M  foo.txt")

        self.assertEqual(status.index, Status.MODIFIED)
        self.assertEqual(status.working_tree, Status.UNMODIFIED)
        self.assertEqual(status.path, Path("foo.txt"))

    def test_parse_deleted(self):
        status = StatusEntry("D  foo.txt")

        self.assertEqual(status.index, Status.DELETED)
        self.assertEqual(status.working_tree, Status.UNMODIFIED)
        self.assertEqual(status.path, Path("foo.txt"))

    def test_parse_added(self):
        status = StatusEntry("A  foo.txt")

        self.assertEqual(status.index, Status.ADDED)
        self.assertEqual(status.working_tree, Status.UNMODIFIED)
        self.assertEqual(status.path, Path("foo.txt"))

    def test_parse_untracked(self):
        status = StatusEntry("?? foo.txt")

        self.assertEqual(status.index, Status.UNTRACKED)
        self.assertEqual(status.working_tree, Status.UNTRACKED)
        self.assertEqual(status.path, Path("foo.txt"))

    def test_parse_ignored_untracked(self):
        status = StatusEntry("!? foo.txt")

        self.assertEqual(status.index, Status.IGNORED)
        self.assertEqual(status.working_tree, Status.UNTRACKED)
        self.assertEqual(status.path, Path("foo.txt"))

    def test_pathlike(self):
        status = StatusEntry("MM foo.txt")
        self.assertEqual(os.fspath(status), "foo.txt")


class ParseGitStatusTestCase(unittest.TestCase):
    def test_parse_git_status(self):
        output = (
            b" M bar.json\x00R  foo.rst\x00foo.md\x00A  foo.txt\x00"
            b"MM ipsum.json\x00AM ipsum.txt\x00D  lorem.json\x00"
        )

        it = parse_git_status(output.decode("utf-8"))

        status = next(it)
        self.assertEqual(status.index, Status.UNMODIFIED)
        self.assertEqual(status.working_tree, Status.MODIFIED)
        self.assertEqual(status.path, Path("bar.json"))

        status = next(it)
        self.assertEqual(status.index, Status.RENAMED)
        self.assertEqual(status.working_tree, Status.UNMODIFIED)
        self.assertEqual(status.path, Path("foo.rst"))
        self.assertEqual(status.old_path, Path("foo.md"))

        status = next(it)
        self.assertEqual(status.index, Status.ADDED)
        self.assertEqual(status.working_tree, Status.UNMODIFIED)
        self.assertEqual(status.path, Path("foo.txt"))

        status = next(it)
        self.assertEqual(status.index, Status.MODIFIED)
        self.assertEqual(status.working_tree, Status.MODIFIED)
        self.assertEqual(status.path, Path("ipsum.json"))

        status = next(it)
        self.assertEqual(status.index, Status.ADDED)
        self.assertEqual(status.working_tree, Status.MODIFIED)
        self.assertEqual(status.path, Path("ipsum.txt"))

        status = next(it)
        self.assertEqual(status.index, Status.DELETED)
        self.assertEqual(status.working_tree, Status.UNMODIFIED)
        self.assertEqual(status.path, Path("lorem.json"))

        with self.assertRaises(StopIteration):
            next(it)
pontos-25.3.2/tests/github/000077500000000000000000000000001476255566300155745ustar00rootroot00000000000000pontos-25.3.2/tests/github/__init__.py000066400000000000000000000001411476255566300177010ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/github/actions/000077500000000000000000000000001476255566300172345ustar00rootroot00000000000000pontos-25.3.2/tests/github/actions/__init__.py000066400000000000000000000001411476255566300213410ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/github/actions/test-pull-request-event.json000066400000000000000000000716741476255566300247040ustar00rootroot00000000000000{
  "action": "labeled",
  "label": {
    "color": "a2eeef",
    "default": true,
    "description": "New feature or request",
    "id": 3665050984,
    "name": "enhancement",
    "node_id": "LA_kwDOGkcHwM7adD1o",
    "url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/labels/enhancement"
  },
  "number": 1,
  "pull_request": {
    "_links": {
      "comments": {
        "href": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/1/comments"
      },
      "commits": {
        "href": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/1/commits"
      },
      "html": {
        "href": "https://github.com/bjoernricks/pull-request-backport-action/pull/1"
      },
      "issue": {
        "href": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/1"
      },
      "review_comment": {
        "href": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/comments{/number}"
      },
      "review_comments": {
        "href": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/1/comments"
      },
      "self": {
        "href": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/1"
      },
      "statuses": {
        "href": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/statuses/beeecfeee02f5a9e69c1f69dba6757df6e5c20f7"
      }
    },
    "active_lock_reason": null,
    "additions": 25,
    "assignee": null,
    "assignees": [],
    "author_association": "OWNER",
    "auto_merge": null,
    "base": {
      "label": "bjoernricks:main",
      "ref": "main",
      "repo": {
        "allow_auto_merge": false,
        "allow_forking": true,
        "allow_merge_commit": true,
        "allow_rebase_merge": true,
        "allow_squash_merge": true,
        "allow_update_branch": false,
        "archive_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/{archive_format}{/ref}",
        "archived": false,
        "assignees_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/branches{/branch}",
        "clone_url": "https://github.com/bjoernricks/pull-request-backport-action.git",
        "collaborators_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/comments{/number}",
        "commits_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/commits{/sha}",
        "compare_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/contributors",
        "created_at": "2021-12-22T13:13:11Z",
        "default_branch": "main",
        "delete_branch_on_merge": false,
        "deployments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/deployments",
        "description": null,
        "disabled": false,
        "downloads_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/downloads",
        "events_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/events",
        "fork": false,
        "forks": 0,
        "forks_count": 0,
        "forks_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/forks",
        "full_name": "bjoernricks/pull-request-backport-action",
        "git_commits_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/tags{/sha}",
        "git_url": "git://github.com/bjoernricks/pull-request-backport-action.git",
        "has_downloads": true,
        "has_issues": true,
        "has_pages": false,
        "has_projects": true,
        "has_wiki": true,
        "homepage": null,
        "hooks_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/hooks",
        "html_url": "https://github.com/bjoernricks/pull-request-backport-action",
        "id": 440862656,
        "is_template": false,
        "issue_comment_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues{/number}",
        "keys_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/labels{/name}",
        "language": "Python",
        "languages_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/languages",
        "license": {
          "key": "gpl-3.0",
          "name": "GNU General Public License v3.0",
          "node_id": "MDc6TGljZW5zZTk=",
          "spdx_id": "GPL-3.0",
          "url": "https://api.github.com/licenses/gpl-3.0"
        },
        "merges_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/merges",
        "milestones_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/milestones{/number}",
        "mirror_url": null,
        "name": "pull-request-backport-action",
        "node_id": "R_kgDOGkcHwA",
        "notifications_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/notifications{?since,all,participating}",
        "open_issues": 1,
        "open_issues_count": 1,
        "owner": {
          "avatar_url": "https://avatars.githubusercontent.com/u/897575?v=4",
          "events_url": "https://api.github.com/users/bjoernricks/events{/privacy}",
          "followers_url": "https://api.github.com/users/bjoernricks/followers",
          "following_url": "https://api.github.com/users/bjoernricks/following{/other_user}",
          "gists_url": "https://api.github.com/users/bjoernricks/gists{/gist_id}",
          "gravatar_id": "",
          "html_url": "https://github.com/bjoernricks",
          "id": 897575,
          "login": "bjoernricks",
          "node_id": "MDQ6VXNlcjg5NzU3NQ==",
          "organizations_url": "https://api.github.com/users/bjoernricks/orgs",
          "received_events_url": "https://api.github.com/users/bjoernricks/received_events",
          "repos_url": "https://api.github.com/users/bjoernricks/repos",
          "site_admin": false,
          "starred_url": "https://api.github.com/users/bjoernricks/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/bjoernricks/subscriptions",
          "type": "User",
          "url": "https://api.github.com/users/bjoernricks"
        },
        "private": false,
        "pulls_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls{/number}",
        "pushed_at": "2021-12-23T06:59:02Z",
        "releases_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/releases{/id}",
        "size": 41,
        "ssh_url": "git@github.com:bjoernricks/pull-request-backport-action.git",
        "stargazers_count": 0,
        "stargazers_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/stargazers",
        "statuses_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/subscribers",
        "subscription_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/subscription",
        "svn_url": "https://github.com/bjoernricks/pull-request-backport-action",
        "tags_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/tags",
        "teams_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/teams",
        "topics": [],
        "trees_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/trees{/sha}",
        "updated_at": "2021-12-22T17:28:48Z",
        "url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action",
        "visibility": "public",
        "watchers": 0,
        "watchers_count": 0
      },
      "sha": "50c0fbe90d0c19dba08165b707e8b720d604ed5d",
      "user": {
        "avatar_url": "https://avatars.githubusercontent.com/u/897575?v=4",
        "events_url": "https://api.github.com/users/bjoernricks/events{/privacy}",
        "followers_url": "https://api.github.com/users/bjoernricks/followers",
        "following_url": "https://api.github.com/users/bjoernricks/following{/other_user}",
        "gists_url": "https://api.github.com/users/bjoernricks/gists{/gist_id}",
        "gravatar_id": "",
        "html_url": "https://github.com/bjoernricks",
        "id": 897575,
        "login": "bjoernricks",
        "node_id": "MDQ6VXNlcjg5NzU3NQ==",
        "organizations_url": "https://api.github.com/users/bjoernricks/orgs",
        "received_events_url": "https://api.github.com/users/bjoernricks/received_events",
        "repos_url": "https://api.github.com/users/bjoernricks/repos",
        "site_admin": false,
        "starred_url": "https://api.github.com/users/bjoernricks/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/bjoernricks/subscriptions",
        "type": "User",
        "url": "https://api.github.com/users/bjoernricks"
      }
    },
    "body": null,
    "changed_files": 3,
    "closed_at": null,
    "comments": 0,
    "comments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/1/comments",
    "commits": 3,
    "commits_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/1/commits",
    "created_at": "2021-12-23T06:45:17Z",
    "deletions": 1,
    "diff_url": "https://github.com/bjoernricks/pull-request-backport-action/pull/1.diff",
    "draft": false,
    "head": {
      "label": "bjoernricks:label-test",
      "ref": "label-test",
      "repo": {
        "allow_auto_merge": false,
        "allow_forking": true,
        "allow_merge_commit": true,
        "allow_rebase_merge": true,
        "allow_squash_merge": true,
        "allow_update_branch": false,
        "archive_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/{archive_format}{/ref}",
        "archived": false,
        "assignees_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/branches{/branch}",
        "clone_url": "https://github.com/bjoernricks/pull-request-backport-action.git",
        "collaborators_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/comments{/number}",
        "commits_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/commits{/sha}",
        "compare_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/contributors",
        "created_at": "2021-12-22T13:13:11Z",
        "default_branch": "main",
        "delete_branch_on_merge": false,
        "deployments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/deployments",
        "description": null,
        "disabled": false,
        "downloads_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/downloads",
        "events_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/events",
        "fork": false,
        "forks": 0,
        "forks_count": 0,
        "forks_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/forks",
        "full_name": "bjoernricks/pull-request-backport-action",
        "git_commits_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/tags{/sha}",
        "git_url": "git://github.com/bjoernricks/pull-request-backport-action.git",
        "has_downloads": true,
        "has_issues": true,
        "has_pages": false,
        "has_projects": true,
        "has_wiki": true,
        "homepage": null,
        "hooks_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/hooks",
        "html_url": "https://github.com/bjoernricks/pull-request-backport-action",
        "id": 440862656,
        "is_template": false,
        "issue_comment_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues{/number}",
        "keys_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/labels{/name}",
        "language": "Python",
        "languages_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/languages",
        "license": {
          "key": "gpl-3.0",
          "name": "GNU General Public License v3.0",
          "node_id": "MDc6TGljZW5zZTk=",
          "spdx_id": "GPL-3.0",
          "url": "https://api.github.com/licenses/gpl-3.0"
        },
        "merges_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/merges",
        "milestones_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/milestones{/number}",
        "mirror_url": null,
        "name": "pull-request-backport-action",
        "node_id": "R_kgDOGkcHwA",
        "notifications_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/notifications{?since,all,participating}",
        "open_issues": 1,
        "open_issues_count": 1,
        "owner": {
          "avatar_url": "https://avatars.githubusercontent.com/u/897575?v=4",
          "events_url": "https://api.github.com/users/bjoernricks/events{/privacy}",
          "followers_url": "https://api.github.com/users/bjoernricks/followers",
          "following_url": "https://api.github.com/users/bjoernricks/following{/other_user}",
          "gists_url": "https://api.github.com/users/bjoernricks/gists{/gist_id}",
          "gravatar_id": "",
          "html_url": "https://github.com/bjoernricks",
          "id": 897575,
          "login": "bjoernricks",
          "node_id": "MDQ6VXNlcjg5NzU3NQ==",
          "organizations_url": "https://api.github.com/users/bjoernricks/orgs",
          "received_events_url": "https://api.github.com/users/bjoernricks/received_events",
          "repos_url": "https://api.github.com/users/bjoernricks/repos",
          "site_admin": false,
          "starred_url": "https://api.github.com/users/bjoernricks/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/bjoernricks/subscriptions",
          "type": "User",
          "url": "https://api.github.com/users/bjoernricks"
        },
        "private": false,
        "pulls_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls{/number}",
        "pushed_at": "2021-12-23T06:59:02Z",
        "releases_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/releases{/id}",
        "size": 41,
        "ssh_url": "git@github.com:bjoernricks/pull-request-backport-action.git",
        "stargazers_count": 0,
        "stargazers_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/stargazers",
        "statuses_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/subscribers",
        "subscription_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/subscription",
        "svn_url": "https://github.com/bjoernricks/pull-request-backport-action",
        "tags_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/tags",
        "teams_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/teams",
        "topics": [],
        "trees_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/trees{/sha}",
        "updated_at": "2021-12-22T17:28:48Z",
        "url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action",
        "visibility": "public",
        "watchers": 0,
        "watchers_count": 0
      },
      "sha": "beeecfeee02f5a9e69c1f69dba6757df6e5c20f7",
      "user": {
        "avatar_url": "https://avatars.githubusercontent.com/u/897575?v=4",
        "events_url": "https://api.github.com/users/bjoernricks/events{/privacy}",
        "followers_url": "https://api.github.com/users/bjoernricks/followers",
        "following_url": "https://api.github.com/users/bjoernricks/following{/other_user}",
        "gists_url": "https://api.github.com/users/bjoernricks/gists{/gist_id}",
        "gravatar_id": "",
        "html_url": "https://github.com/bjoernricks",
        "id": 897575,
        "login": "bjoernricks",
        "node_id": "MDQ6VXNlcjg5NzU3NQ==",
        "organizations_url": "https://api.github.com/users/bjoernricks/orgs",
        "received_events_url": "https://api.github.com/users/bjoernricks/received_events",
        "repos_url": "https://api.github.com/users/bjoernricks/repos",
        "site_admin": false,
        "starred_url": "https://api.github.com/users/bjoernricks/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/bjoernricks/subscriptions",
        "type": "User",
        "url": "https://api.github.com/users/bjoernricks"
      }
    },
    "html_url": "https://github.com/bjoernricks/pull-request-backport-action/pull/1",
    "id": 808914229,
    "issue_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/1",
    "labels": [
      {
        "color": "a2eeef",
        "default": true,
        "description": "New feature or request",
        "id": 3665050984,
        "name": "enhancement",
        "node_id": "LA_kwDOGkcHwM7adD1o",
        "url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/labels/enhancement"
      }
    ],
    "locked": false,
    "maintainer_can_modify": false,
    "merge_commit_sha": "0e34b424155397fe4cfc6a52db4b4c4a7961db79",
    "mergeable": true,
    "mergeable_state": "unstable",
    "merged": false,
    "merged_at": null,
    "merged_by": null,
    "milestone": null,
    "node_id": "PR_kwDOGkcHwM4wNw01",
    "number": 1,
    "patch_url": "https://github.com/bjoernricks/pull-request-backport-action/pull/1.patch",
    "rebaseable": true,
    "requested_reviewers": [],
    "requested_teams": [],
    "review_comment_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/comments{/number}",
    "review_comments": 0,
    "review_comments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/1/comments",
    "state": "open",
    "statuses_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/statuses/beeecfeee02f5a9e69c1f69dba6757df6e5c20f7",
    "title": "Add foo for bar",
    "updated_at": "2021-12-23T06:59:30Z",
    "url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls/1",
    "user": {
      "avatar_url": "https://avatars.githubusercontent.com/u/897575?v=4",
      "events_url": "https://api.github.com/users/bjoernricks/events{/privacy}",
      "followers_url": "https://api.github.com/users/bjoernricks/followers",
      "following_url": "https://api.github.com/users/bjoernricks/following{/other_user}",
      "gists_url": "https://api.github.com/users/bjoernricks/gists{/gist_id}",
      "gravatar_id": "",
      "html_url": "https://github.com/bjoernricks",
      "id": 897575,
      "login": "bjoernricks",
      "node_id": "MDQ6VXNlcjg5NzU3NQ==",
      "organizations_url": "https://api.github.com/users/bjoernricks/orgs",
      "received_events_url": "https://api.github.com/users/bjoernricks/received_events",
      "repos_url": "https://api.github.com/users/bjoernricks/repos",
      "site_admin": false,
      "starred_url": "https://api.github.com/users/bjoernricks/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/bjoernricks/subscriptions",
      "type": "User",
      "url": "https://api.github.com/users/bjoernricks"
    }
  },
  "repository": {
    "allow_forking": true,
    "archive_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/{archive_format}{/ref}",
    "archived": false,
    "assignees_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/assignees{/user}",
    "blobs_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/blobs{/sha}",
    "branches_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/branches{/branch}",
    "clone_url": "https://github.com/bjoernricks/pull-request-backport-action.git",
    "collaborators_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/collaborators{/collaborator}",
    "comments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/comments{/number}",
    "commits_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/commits{/sha}",
    "compare_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/compare/{base}...{head}",
    "contents_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/contents/{+path}",
    "contributors_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/contributors",
    "created_at": "2021-12-22T13:13:11Z",
    "default_branch": "main",
    "deployments_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/deployments",
    "description": null,
    "disabled": false,
    "downloads_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/downloads",
    "events_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/events",
    "fork": false,
    "forks": 0,
    "forks_count": 0,
    "forks_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/forks",
    "full_name": "bjoernricks/pull-request-backport-action",
    "git_commits_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/commits{/sha}",
    "git_refs_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/refs{/sha}",
    "git_tags_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/tags{/sha}",
    "git_url": "git://github.com/bjoernricks/pull-request-backport-action.git",
    "has_downloads": true,
    "has_issues": true,
    "has_pages": false,
    "has_projects": true,
    "has_wiki": true,
    "homepage": null,
    "hooks_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/hooks",
    "html_url": "https://github.com/bjoernricks/pull-request-backport-action",
    "id": 440862656,
    "is_template": false,
    "issue_comment_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/comments{/number}",
    "issue_events_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues/events{/number}",
    "issues_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/issues{/number}",
    "keys_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/keys{/key_id}",
    "labels_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/labels{/name}",
    "language": "Python",
    "languages_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/languages",
    "license": {
      "key": "gpl-3.0",
      "name": "GNU General Public License v3.0",
      "node_id": "MDc6TGljZW5zZTk=",
      "spdx_id": "GPL-3.0",
      "url": "https://api.github.com/licenses/gpl-3.0"
    },
    "merges_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/merges",
    "milestones_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/milestones{/number}",
    "mirror_url": null,
    "name": "pull-request-backport-action",
    "node_id": "R_kgDOGkcHwA",
    "notifications_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/notifications{?since,all,participating}",
    "open_issues": 1,
    "open_issues_count": 1,
    "owner": {
      "avatar_url": "https://avatars.githubusercontent.com/u/897575?v=4",
      "events_url": "https://api.github.com/users/bjoernricks/events{/privacy}",
      "followers_url": "https://api.github.com/users/bjoernricks/followers",
      "following_url": "https://api.github.com/users/bjoernricks/following{/other_user}",
      "gists_url": "https://api.github.com/users/bjoernricks/gists{/gist_id}",
      "gravatar_id": "",
      "html_url": "https://github.com/bjoernricks",
      "id": 897575,
      "login": "bjoernricks",
      "node_id": "MDQ6VXNlcjg5NzU3NQ==",
      "organizations_url": "https://api.github.com/users/bjoernricks/orgs",
      "received_events_url": "https://api.github.com/users/bjoernricks/received_events",
      "repos_url": "https://api.github.com/users/bjoernricks/repos",
      "site_admin": false,
      "starred_url": "https://api.github.com/users/bjoernricks/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/bjoernricks/subscriptions",
      "type": "User",
      "url": "https://api.github.com/users/bjoernricks"
    },
    "private": false,
    "pulls_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/pulls{/number}",
    "pushed_at": "2021-12-23T06:59:02Z",
    "releases_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/releases{/id}",
    "size": 41,
    "ssh_url": "git@github.com:bjoernricks/pull-request-backport-action.git",
    "stargazers_count": 0,
    "stargazers_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/stargazers",
    "statuses_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/statuses/{sha}",
    "subscribers_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/subscribers",
    "subscription_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/subscription",
    "svn_url": "https://github.com/bjoernricks/pull-request-backport-action",
    "tags_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/tags",
    "teams_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/teams",
    "topics": [],
    "trees_url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action/git/trees{/sha}",
    "updated_at": "2021-12-22T17:28:48Z",
    "url": "https://api.github.com/repos/bjoernricks/pull-request-backport-action",
    "visibility": "public",
    "watchers": 0,
    "watchers_count": 0
  },
  "sender": {
    "avatar_url": "https://avatars.githubusercontent.com/u/897575?v=4",
    "events_url": "https://api.github.com/users/bjoernricks/events{/privacy}",
    "followers_url": "https://api.github.com/users/bjoernricks/followers",
    "following_url": "https://api.github.com/users/bjoernricks/following{/other_user}",
    "gists_url": "https://api.github.com/users/bjoernricks/gists{/gist_id}",
    "gravatar_id": "",
    "html_url": "https://github.com/bjoernricks",
    "id": 897575,
    "login": "bjoernricks",
    "node_id": "MDQ6VXNlcjg5NzU3NQ==",
    "organizations_url": "https://api.github.com/users/bjoernricks/orgs",
    "received_events_url": "https://api.github.com/users/bjoernricks/received_events",
    "repos_url": "https://api.github.com/users/bjoernricks/repos",
    "site_admin": false,
    "starred_url": "https://api.github.com/users/bjoernricks/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/bjoernricks/subscriptions",
    "type": "User",
    "url": "https://api.github.com/users/bjoernricks"
  }
}
pontos-25.3.2/tests/github/actions/test_core.py000066400000000000000000000126161476255566300216030ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from unittest.mock import patch

from pontos.github.actions.core import ActionIO, Console
from pontos.github.actions.errors import GitHubActionsError
from pontos.testing import temp_directory


@patch("builtins.print")
class ConsoleTestCase(unittest.TestCase):
    """Tests for the GitHub Actions workflow-command helpers of ``Console``.

    ``builtins.print`` is mocked for the whole class so every test can
    assert the exact workflow command string written to stdout.
    """

    def test_start_group(self, mock_print):
        # Opening a group emits a ::group:: command carrying the title.
        Console.start_group("Foo")
        mock_print.assert_called_once_with("::group::Foo")

    def test_end_group(self, mock_print):
        Console.end_group()
        mock_print.assert_called_once_with("::endgroup::")

    def test_group(self, mock_print):
        # The context manager opens the group on enter and closes it on exit.
        with Console.group("Foo"):
            mock_print.assert_called_once_with("::group::Foo")

        mock_print.assert_called_with("::endgroup::")

    def test_warning(self, mock_print):
        annotation = dict(
            name="bar",
            line="123",
            end_line="234",
            column="1",
            end_column="2",
            title="Foo Bar",
        )
        Console.warning("foo", **annotation)
        mock_print.assert_called_once_with(
            "::warning file=bar,line=123,endLine=234,col=1,endColumn=2,title=Foo Bar::foo"  # noqa: E501
        )

    def test_error(self, mock_print):
        annotation = dict(
            name="bar",
            line="123",
            end_line="234",
            column="1",
            end_column="2",
            title="Foo Bar",
        )
        Console.error("foo", **annotation)
        mock_print.assert_called_once_with(
            "::error file=bar,line=123,endLine=234,col=1,endColumn=2,title=Foo Bar::foo"  # noqa: E501
        )

    def test_notice(self, mock_print):
        annotation = dict(
            name="bar",
            line="123",
            end_line="234",
            column="1",
            end_column="2",
            title="Foo Bar",
        )
        Console.notice("foo", **annotation)
        mock_print.assert_called_once_with(
            "::notice file=bar,line=123,endLine=234,col=1,endColumn=2,title=Foo Bar::foo"  # noqa: E501
        )

    def test_log(self, mock_print):
        # Plain log lines are printed verbatim, without a command prefix.
        Console.log("foo")
        mock_print.assert_called_once_with("foo")

    def test_debug(self, mock_print):
        Console.debug("foo")
        mock_print.assert_called_once_with("::debug::foo")


class ActionIOTestCase(unittest.TestCase):
    """Tests for ``ActionIO`` input/output handling of GitHub Actions."""

    @patch.dict(
        "os.environ", {"INPUT_FOO": "1234", "INPUT_FOO_BAR": "2345"}, clear=True
    )
    def test_input(self):
        # Input names are case-insensitive and spaces map to underscores.
        for name in ("foo", "FOO", "FoO"):
            self.assertEqual(ActionIO.input(name), "1234")

        for name in ("foo bar", "FOO_BAR", "FoO BaR"):
            self.assertEqual(ActionIO.input(name), "2345")

    def test_output(self):
        with temp_directory() as tmp_dir:
            output_file = tmp_dir / "github.output"
            env = {"GITHUB_OUTPUT": str(output_file)}

            with patch.dict("os.environ", env, clear=True):
                ActionIO.output("foo", "bar")
                ActionIO.output("lorem", "ipsum")

                # Each output is appended as a key=value line.
                written = output_file.read_text(encoding="utf8")
                self.assertEqual(written, "foo=bar\nlorem=ipsum\n")

    @patch("uuid.uuid1")
    def test_multiline_output(self, mock_uuid):
        delimiter = "deadbeef"
        mock_uuid.return_value = delimiter
        value = "bar\nbaz\nboing"
        # NOTE(review): the expected string has no newlines around the
        # delimiter — confirm this really matches the heredoc format
        # ActionIO.multiline_output writes.
        expected = f"foo<<{delimiter}{value}{delimiter}"

        with temp_directory() as tmp_dir:
            output_file = tmp_dir / "github.output"
            env = {"GITHUB_OUTPUT": str(output_file)}

            with patch.dict("os.environ", env, clear=True):
                ActionIO.multiline_output("foo", value)

                written = output_file.read_text(encoding="utf8")
                self.assertEqual(written, expected)

    @patch.dict("os.environ", {}, clear=True)
    def test_output_no_env(self):
        # Without GITHUB_OUTPUT set writing an output must fail.
        with self.assertRaises(GitHubActionsError):
            ActionIO.output("foo", "bar")

    @patch.dict("os.environ", {"GITHUB_OUTPUT": ""}, clear=True)
    def test_output_empty_env(self):
        # An empty GITHUB_OUTPUT value is treated like an unset one.
        with self.assertRaises(GitHubActionsError):
            ActionIO.output("foo", "bar")

    @patch.dict("os.environ", {}, clear=True)
    def test_no_github_output(self):
        self.assertFalse(ActionIO.has_output())

    @patch.dict(
        "os.environ", {"GITHUB_OUTPUT": "/foo/github.output"}, clear=True
    )
    def test_has_github_output(self):
        self.assertTrue(ActionIO.has_output())

    def test_out(self):
        with temp_directory() as tmp_dir:
            output_file = tmp_dir / "github.output"
            env = {"GITHUB_OUTPUT": str(output_file.absolute())}

            with patch.dict("os.environ", env, clear=True):
                # The context manager yields a writer for action outputs.
                with ActionIO.out() as output:
                    output.write("foo", "bar")

            self.assertEqual(
                output_file.read_text(encoding="utf8"), "foo=bar\n"
            )

    @patch.dict("os.environ", {}, clear=True)
    def test_out_failure(self):
        with self.assertRaisesRegex(
            GitHubActionsError,
            "GITHUB_OUTPUT environment variable not set. Can't write "
            "action output.",
        ):
            with ActionIO.out():
                pass
pontos-25.3.2/tests/github/actions/test_env.py000066400000000000000000000055431476255566300214440ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from pathlib import Path
from unittest.mock import patch

from pontos.github.actions.env import GitHubEnvironment


class GitHubEnvironmentTestCase(unittest.TestCase):
    """Tests mapping GITHUB_* environment variables to the
    ``GitHubEnvironment`` properties.

    Every test patches ``os.environ`` with exactly the variable under
    test (``clear=True``) and checks the corresponding property value.
    """

    @patch.dict("os.environ", {"GITHUB_WORKSPACE": "/foo/bar"}, clear=True)
    def test_workspace(self):
        # The workspace is exposed as a pathlib.Path, not a plain string.
        self.assertEqual(GitHubEnvironment().workspace, Path("/foo/bar"))

    @patch.dict("os.environ", {"GITHUB_REPOSITORY": "foo/bar"}, clear=True)
    def test_repository(self):
        self.assertEqual(GitHubEnvironment().repository, "foo/bar")

    @patch.dict("os.environ", {"GITHUB_SHA": "123456"}, clear=True)
    def test_sha(self):
        self.assertEqual(GitHubEnvironment().sha, "123456")

    @patch.dict("os.environ", {"GITHUB_REF": "ref/branches/main"}, clear=True)
    def test_ref(self):
        self.assertEqual(GitHubEnvironment().ref, "ref/branches/main")

    @patch.dict("os.environ", {"GITHUB_REF_NAME": "main"}, clear=True)
    def test_ref_name(self):
        self.assertEqual(GitHubEnvironment().ref_name, "main")

    @patch.dict("os.environ", {"GITHUB_EVENT_PATH": "/foo/bar"}, clear=True)
    def test_event_path(self):
        # The event payload location is also converted into a Path.
        self.assertEqual(GitHubEnvironment().event_path, Path("/foo/bar"))

    @patch.dict("os.environ", {"GITHUB_HEAD_REF": "foo"}, clear=True)
    def test_head_ref(self):
        self.assertEqual(GitHubEnvironment().head_ref, "foo")

    @patch.dict("os.environ", {"GITHUB_BASE_REF": "main"}, clear=True)
    def test_base_ref(self):
        self.assertEqual(GitHubEnvironment().base_ref, "main")

    @patch.dict(
        "os.environ", {"GITHUB_API_URL": "https://api.github.com"}, clear=True
    )
    def test_api_url(self):
        self.assertEqual(GitHubEnvironment().api_url, "https://api.github.com")

    @patch.dict("os.environ", {"GITHUB_ACTOR": "greenbonebot"}, clear=True)
    def test_actor(self):
        self.assertEqual(GitHubEnvironment().actor, "greenbonebot")

    @patch.dict("os.environ", {"GITHUB_RUN_ID": "12345"}, clear=True)
    def test_run_id(self):
        self.assertEqual(GitHubEnvironment().run_id, "12345")

    @patch.dict("os.environ", {"GITHUB_ACTION": "54321"}, clear=True)
    def test_action_id(self):
        self.assertEqual(GitHubEnvironment().action_id, "54321")

    @patch.dict("os.environ", {"RUNNER_DEBUG": "1"}, clear=True)
    def test_is_debug_enabled(self):
        self.assertTrue(GitHubEnvironment().is_debug)

    @patch.dict("os.environ", {"RUNNER_DEBUG": ""}, clear=True)
    def test_is_debug_disabled(self):
        # An empty RUNNER_DEBUG value must not enable debug mode.
        self.assertFalse(GitHubEnvironment().is_debug)
pontos-25.3.2/tests/github/actions/test_event.py000066400000000000000000000026661476255566300220000ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from pathlib import Path

from pontos.github.actions.event import GitHubEvent, PullRequestState

here = Path(__file__).parent


class GitHubPullRequestEventTestCase(unittest.TestCase):
    """Tests parsing of a recorded pull-request webhook event fixture."""

    def setUp(self) -> None:
        # Load the JSON fixture shipped next to this test module.
        fixture = Path(here / "test-pull-request-event.json")
        self.pull_request = GitHubEvent(fixture).pull_request

    def test_draft(self):
        self.assertFalse(self.pull_request.draft)

    def test_labels(self):
        labels = self.pull_request.labels
        self.assertEqual(len(labels), 1)
        self.assertEqual(labels[0].name, "enhancement")

    def test_number(self):
        self.assertEqual(self.pull_request.number, 1)

    def test_title(self):
        self.assertEqual(self.pull_request.title, "Add foo for bar")

    def test_state(self):
        self.assertEqual(self.pull_request.state, PullRequestState.OPEN)

    def test_base(self):
        # Target branch of the pull request in the fixture.
        target = self.pull_request.base
        self.assertEqual(target.name, "main")
        self.assertEqual(
            target.sha, "50c0fbe90d0c19dba08165b707e8b720d604ed5d"
        )

    def test_head(self):
        # Source branch of the pull request in the fixture.
        source = self.pull_request.head
        self.assertEqual(source.name, "label-test")
        self.assertEqual(
            source.sha, "beeecfeee02f5a9e69c1f69dba6757df6e5c20f7"
        )

    def test_merged(self):
        self.assertEqual(self.pull_request.merged, False)
pontos-25.3.2/tests/github/api/000077500000000000000000000000001476255566300163455ustar00rootroot00000000000000pontos-25.3.2/tests/github/api/__init__.py000066400000000000000000000011451476255566300204570ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

from unittest.mock import MagicMock

import httpx

from pontos.github.api.client import GitHubAsyncREST, GitHubAsyncRESTClient
from tests import AsyncMock, IsolatedAsyncioTestCase


def create_response(*args, **kwargs) -> MagicMock:
    """Create a mock object that mimics an ``httpx.Response``.

    Any positional and keyword arguments are forwarded to ``MagicMock``,
    e.g. to preset attributes like ``status_code`` on the mock.
    """
    response = MagicMock(spec=httpx.Response, *args, **kwargs)
    return response


class GitHubAsyncRESTTestCase(IsolatedAsyncioTestCase):
    """Base test case wiring an API class to a fully mocked REST client.

    Subclasses set ``api_cls`` to the API implementation under test; no
    real HTTP traffic ever occurs because the client is an ``AsyncMock``.
    """

    api_cls = GitHubAsyncREST

    def setUp(self) -> None:
        # Stub out the HTTP layer so tests only observe the calls made.
        mocked_client = AsyncMock(spec=GitHubAsyncRESTClient)
        self.client = mocked_client
        self.api = self.api_cls(mocked_client)
pontos-25.3.2/tests/github/api/pr-files.json000066400000000000000000000470651476255566300207750ustar00rootroot00000000000000[
    {
      "sha": "cad83468a60da7af39549d2ac07b86b51563618e",
      "filename": "gvm/protocols/gmpv2110/__init__.py",
      "status": "modified",
      "additions": 2,
      "deletions": 1,
      "changes": 3,
      "blob_url": "https://github.com/greenbone/python-gvm/blob/63b99dab803c778eff05783025334a61e58614e7/gvm/protocols/gmpv2110/__init__.py",
      "raw_url": "https://github.com/greenbone/python-gvm/raw/63b99dab803c778eff05783025334a61e58614e7/gvm/protocols/gmpv2110/__init__.py",
      "contents_url": "https://api.github.com/repos/greenbone/python-gvm/contents/gvm/protocols/gmpv2110/__init__.py?ref=63b99dab803c778eff05783025334a61e58614e7",
      "patch": "@@ -127,9 +127,10 @@\n     AliveTest,\n     TargetsMixin,\n )\n-from gvm.protocols.gmpv214.entities.users import UsersMixin\n \n # NEW IN 2110\n+from gvm.protocols.gmpv2110.entities.users import UsersMixin\n+\n from gvm.protocols.gmpv2110.system.version import VersionMixin\n \n from gvm.connections import GvmConnection"
    },
    {
      "sha": "f22f0818d145afaa3031fee6b85a434768b3d0ce",
      "filename": "gvm/protocols/gmpv2110/entities/users.py",
      "status": "added",
      "additions": 191,
      "deletions": 0,
      "changes": 191,
      "blob_url": "https://github.com/greenbone/python-gvm/blob/63b99dab803c778eff05783025334a61e58614e7/gvm/protocols/gmpv2110/entities/users.py",
      "raw_url": "https://github.com/greenbone/python-gvm/raw/63b99dab803c778eff05783025334a61e58614e7/gvm/protocols/gmpv2110/entities/users.py",
      "contents_url": "https://api.github.com/repos/greenbone/python-gvm/contents/gvm/protocols/gmpv2110/entities/users.py?ref=63b99dab803c778eff05783025334a61e58614e7",
      "patch": "@@ -0,0 +1,191 @@\n+# -*- coding: utf-8 -*-\n+# Copyright (C) 2022 Greenbone AG\n+#\n+# SPDX-License-Identifier: GPL-3.0-or-later\n+#\n+# This program is free software: you can redistribute it and/or modify\n+# it under the terms of the GNU General Public License as published by\n+# the Free Software Foundation, either version 3 of the License, or\n+# (at your option) any later version.\n+#\n+# This program is distributed in the hope that it will be useful,\n+# but WITHOUT ANY WARRANTY; without even the implied warranty of\n+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n+# GNU General Public License for more details.\n+#\n+# You should have received a copy of the GNU General Public License\n+# along with this program.  If not, see .\n+\n+# pylint: disable=arguments-differ, arguments-renamed\n+\n+from typing import Any, List, Optional\n+\n+from gvm.errors import RequiredArgument\n+from gvm.protocols.gmpv214.entities.users import (\n+    UsersMixin as Gmp214UsersMixin,\n+    UserAuthType,\n+)\n+from gvm.utils import deprecation, to_comma_list, to_bool\n+from gvm.xml import XmlCommand\n+\n+\n+class UsersMixin(Gmp214UsersMixin):\n+    def create_user(\n+        self,\n+        name: str,\n+        *,\n+        password: Optional[str] = None,\n+        hosts: Optional[List[str]] = None,\n+        hosts_allow: Optional[bool] = False,\n+        ifaces: Any = None,\n+        ifaces_allow: Any = None,\n+        role_ids: Optional[List[str]] = None,\n+    ) -> Any:\n+        \"\"\"Create a new user\n+\n+        Arguments:\n+            name: Name of the user\n+            password: Password of the user\n+            hosts: A list of host addresses (IPs, DNS names)\n+            hosts_allow: If True allow only access to passed hosts otherwise\n+                deny access. 
Default is False for deny hosts.\n+            ifaces: deprecated\n+            ifaces_allow: deprecated\n+            role_ids: A list of role UUIDs for the user\n+\n+        Returns:\n+            The response. See :py:meth:`send_command` for details.\n+        \"\"\"\n+        if not name:\n+            raise RequiredArgument(\n+                function=self.create_user.__name__, argument='name'\n+            )\n+\n+        cmd = XmlCommand(\"create_user\")\n+        cmd.add_element(\"name\", name)\n+\n+        if password:\n+            cmd.add_element(\"password\", password)\n+\n+        if hosts:\n+            cmd.add_element(\n+                \"hosts\",\n+                to_comma_list(hosts),\n+                attrs={\"allow\": to_bool(hosts_allow)},\n+            )\n+\n+        if ifaces is not None:\n+            major, minor = self.get_protocol_version()\n+            deprecation(\n+                \"The ifaces parameter has been removed in GMP\"\n+                f\" version {major}{minor}\"\n+            )\n+\n+        if ifaces_allow is not None:\n+            major, minor = self.get_protocol_version()\n+            deprecation(\n+                \"The ifaces_allow parameter has been removed in GMP\"\n+                f\" version {major}{minor}\"\n+            )\n+\n+        if role_ids:\n+            for role in role_ids:\n+                cmd.add_element(\"role\", attrs={\"id\": role})\n+\n+        return self._send_xml_command(cmd)\n+\n+    def modify_user(\n+        self,\n+        user_id: str = None,\n+        *,\n+        name: Optional[str] = None,\n+        comment: Optional[str] = None,\n+        password: Optional[str] = None,\n+        auth_source: Optional[UserAuthType] = None,\n+        role_ids: Optional[List[str]] = None,\n+        hosts: Optional[List[str]] = None,\n+        hosts_allow: Optional[bool] = False,\n+        ifaces: Any = None,\n+        ifaces_allow: Any = None,\n+        group_ids: Optional[List[str]] = None,\n+    ) -> 
Any:\n+        \"\"\"Modifies an existing user.\n+\n+        Most of the fields need to be supplied\n+        for changing a single field even if no change is wanted for those.\n+        Else empty values are inserted for the missing fields instead.\n+\n+        Arguments:\n+            user_id: UUID of the user to be modified.\n+            name: The new name for the user.\n+            comment: Comment on the user.\n+            password: The password for the user.\n+            auth_source: Source allowed for authentication for this user.\n+            roles_id: List of roles UUIDs for the user.\n+            hosts: User access rules: List of hosts.\n+            hosts_allow: Defines how the hosts list is to be interpreted.\n+                If False (default) the list is treated as a deny list.\n+                All hosts are allowed by default except those provided by\n+                the hosts parameter. If True the list is treated as a\n+                allow list. All hosts are denied by default except those\n+                provided by the hosts parameter.\n+            ifaces: deprecated\n+            ifaces_allow: deprecated\n+            group_ids: List of group UUIDs for the user.\n+\n+        Returns:\n+            The response. 
See :py:meth:`send_command` for details.\n+        \"\"\"\n+        if not user_id:\n+            raise RequiredArgument(\n+                function=self.modify_user.__name__, argument='user_id'\n+            )\n+\n+        cmd = XmlCommand(\"modify_user\")\n+\n+        cmd.set_attribute(\"user_id\", user_id)\n+\n+        if name:\n+            cmd.add_element(\"new_name\", name)\n+\n+        if role_ids:\n+            for role in role_ids:\n+                cmd.add_element(\"role\", attrs={\"id\": role})\n+\n+        if hosts:\n+            cmd.add_element(\n+                \"hosts\",\n+                to_comma_list(hosts),\n+                attrs={\"allow\": to_bool(hosts_allow)},\n+            )\n+\n+        if ifaces is not None:\n+            major, minor = self.get_protocol_version()\n+            deprecation(\n+                \"The ifaces parameter has been removed in GMP\"\n+                f\" version {major}{minor}\"\n+            )\n+\n+        if ifaces_allow is not None:\n+            major, minor = self.get_protocol_version()\n+            deprecation(\n+                \"The ifaces_allow parameter has been removed in GMP\"\n+                f\" version {major}{minor}\"\n+            )\n+\n+        if comment:\n+            cmd.add_element(\"comment\", comment)\n+\n+        if password:\n+            cmd.add_element(\"password\", password)\n+\n+        if auth_source:\n+            _xmlauthsrc = cmd.add_element(\"sources\")\n+            _xmlauthsrc.add_element(\"source\", auth_source.value)\n+\n+        if group_ids:\n+            _xmlgroups = cmd.add_element(\"groups\")\n+            for group_id in group_ids:\n+                _xmlgroups.add_element(\"group\", attrs={\"id\": group_id})\n+\n+        return self._send_xml_command(cmd)"
    },
    {
      "sha": "dd464f0e6f137ed440c141e08957973a03e1317a",
      "filename": "tests/protocols/gmpv2110/entities/test_users.py",
      "status": "modified",
      "additions": 1,
      "deletions": 2,
      "changes": 3,
      "blob_url": "https://github.com/greenbone/python-gvm/blob/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/test_users.py",
      "raw_url": "https://github.com/greenbone/python-gvm/raw/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/test_users.py",
      "contents_url": "https://api.github.com/repos/greenbone/python-gvm/contents/tests/protocols/gmpv2110/entities/test_users.py?ref=63b99dab803c778eff05783025334a61e58614e7",
      "patch": "@@ -17,9 +17,8 @@\n # along with this program.  If not, see .\n \n from ...gmpv2110 import Gmpv2110TestCase\n-from .users import GmpModifyUserTestMixin\n+from .users import GmpCreateUserTestMixin, GmpModifyUserTestMixin\n from ...gmpv208.entities.users import (\n-    GmpCreateUserTestMixin,\n     GmpCloneUserTestMixin,\n     GmpDeleteUserTestMixin,\n     GmpGetUsersTestMixin,"
    },
    {
      "sha": "bc37e6b01a09b2427df50cc7a470f9cef0c1f651",
      "filename": "tests/protocols/gmpv2110/entities/users/__init__.py",
      "status": "modified",
      "additions": 1,
      "deletions": 0,
      "changes": 1,
      "blob_url": "https://github.com/greenbone/python-gvm/blob/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/users/__init__.py",
      "raw_url": "https://github.com/greenbone/python-gvm/raw/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/users/__init__.py",
      "contents_url": "https://api.github.com/repos/greenbone/python-gvm/contents/tests/protocols/gmpv2110/entities/users/__init__.py?ref=63b99dab803c778eff05783025334a61e58614e7",
      "patch": "@@ -16,4 +16,5 @@\n # You should have received a copy of the GNU General Public License\n # along with this program.  If not, see .\n \n+from .test_create_user import GmpCreateUserTestMixin\n from .test_modify_user import GmpModifyUserTestMixin"
    },
    {
      "sha": "8b9c51973a7667e43aeb77471a63f77b401be838",
      "filename": "tests/protocols/gmpv2110/entities/users/test_create_user.py",
      "status": "added",
      "additions": 122,
      "deletions": 0,
      "changes": 122,
      "blob_url": "https://github.com/greenbone/python-gvm/blob/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/users/test_create_user.py",
      "raw_url": "https://github.com/greenbone/python-gvm/raw/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/users/test_create_user.py",
      "contents_url": "https://api.github.com/repos/greenbone/python-gvm/contents/tests/protocols/gmpv2110/entities/users/test_create_user.py?ref=63b99dab803c778eff05783025334a61e58614e7",
      "patch": "@@ -0,0 +1,122 @@\n+# -*- coding: utf-8 -*-\n+# Copyright (C) 2018-2022 Greenbone AG\n+#\n+# SPDX-License-Identifier: GPL-3.0-or-later\n+#\n+# This program is free software: you can redistribute it and/or modify\n+# it under the terms of the GNU General Public License as published by\n+# the Free Software Foundation, either version 3 of the License, or\n+# (at your option) any later version.\n+#\n+# This program is distributed in the hope that it will be useful,\n+# but WITHOUT ANY WARRANTY; without even the implied warranty of\n+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the\n+# GNU General Public License for more details.\n+#\n+# You should have received a copy of the GNU General Public License\n+# along with this program.  If not, see .\n+\n+from unittest.mock import patch, call\n+from gvm.errors import RequiredArgument\n+\n+\n+class GmpCreateUserTestMixin:\n+    def test_create_user_missing_name(self):\n+        with self.assertRaises(RequiredArgument):\n+            self.gmp.create_user(name=None)\n+\n+        with self.assertRaises(RequiredArgument):\n+            self.gmp.create_user(name='')\n+\n+    def test_create_user(self):\n+        self.gmp.create_user(name='foo')\n+\n+        self.connection.send.has_been_called_with(\n+            '' 'foo' ''\n+        )\n+\n+    def test_create_user_with_password(self):\n+        self.gmp.create_user(name='foo', password='bar')\n+\n+        self.connection.send.has_been_called_with(\n+            ''\n+            'foo'\n+            'bar'\n+            ''\n+        )\n+\n+    def test_create_user_with_hosts(self):\n+        self.gmp.create_user(name='foo', hosts=['h1', 'h2'], hosts_allow=True)\n+\n+        self.connection.send.has_been_called_with(\n+            ''\n+            'foo'\n+            'h1,h2'\n+            ''\n+        )\n+\n+        self.gmp.create_user(name='foo', hosts=['h1', 'h2'])\n+\n+        self.connection.send.has_been_called_with(\n+            ''\n+          
  'foo'\n+            'h1,h2'\n+            ''\n+        )\n+\n+        self.gmp.create_user(name='foo', hosts=['h1', 'h2'], hosts_allow=False)\n+\n+        self.connection.send.has_been_called_with(\n+            ''\n+            'foo'\n+            'h1,h2'\n+            ''\n+        )\n+\n+    @patch('gvm.protocols.gmpv2110.entities.users.deprecation')\n+    def test_create_user_with_ifaces(self, deprecation_mock):\n+        self.gmp.create_user(name='foo', ifaces=['h1', 'h2'], ifaces_allow=True)\n+\n+        self.connection.send.has_been_called_with(\n+            '' 'foo' ''\n+        )\n+\n+        self.gmp.create_user(name='foo', ifaces=['h1', 'h2'])\n+\n+        self.connection.send.has_been_called_with(\n+            '' 'foo' ''\n+        )\n+\n+        self.gmp.create_user(\n+            name='foo', ifaces=['h1', 'h2'], ifaces_allow=False\n+        )\n+\n+        self.connection.send.has_been_called_with(\n+            '' 'foo' ''\n+        )\n+\n+        # pylint: disable=line-too-long\n+        deprecation_calls = [\n+            call('The ifaces parameter has been removed in GMP version 2110'),\n+            call(\n+                'The ifaces_allow parameter has been removed in GMP version 2110'\n+            ),\n+            call('The ifaces parameter has been removed in GMP version 2110'),\n+            call('The ifaces parameter has been removed in GMP version 2110'),\n+            call(\n+                'The ifaces_allow parameter has been removed in GMP version 2110'\n+            ),\n+        ]\n+        # pylint: enable=line-too-long\n+        deprecation_mock.assert_has_calls(deprecation_calls)\n+\n+    def test_create_user_with_role_ids(self):\n+        self.gmp.create_user(name='foo', role_ids=['r1', 'r2'])\n+\n+        self.connection.send.has_been_called_with(\n+            ''\n+            'foo'\n+            ''\n+            ''\n+            ''\n+        )"
    },
    {
      "sha": "156d04f50e99207706cd5068c2fb3bdd5930cab5",
      "filename": "tests/protocols/gmpv2110/entities/users/test_modify_user.py",
      "status": "modified",
      "additions": 29,
      "deletions": 27,
      "changes": 56,
      "blob_url": "https://github.com/greenbone/python-gvm/blob/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/users/test_modify_user.py",
      "raw_url": "https://github.com/greenbone/python-gvm/raw/63b99dab803c778eff05783025334a61e58614e7/tests/protocols/gmpv2110/entities/users/test_modify_user.py",
      "contents_url": "https://api.github.com/repos/greenbone/python-gvm/contents/tests/protocols/gmpv2110/entities/users/test_modify_user.py?ref=63b99dab803c778eff05783025334a61e58614e7",
      "patch": "@@ -16,6 +16,7 @@\n # You should have received a copy of the GNU General Public License\n # along with this program.  If not, see .\n \n+from unittest.mock import patch, call\n from gvm.errors import RequiredArgument\n from gvm.protocols.gmpv2110 import UserAuthType\n \n@@ -156,43 +157,44 @@ def test_modify_user_with_hosts(self):\n             ''\n         )\n \n-    def test_modify_user_with_ifaces(self):\n+    @patch('gvm.protocols.gmpv2110.entities.users.deprecation')\n+    def test_modify_user_with_ifaces(self, deprecation_mock):\n         self.gmp.modify_user(user_id='u1', ifaces=[])\n \n         self.connection.send.has_been_called_with('')\n \n-        self.gmp.modify_user(user_id='u1', ifaces=['foo'])\n+        self.gmp.modify_user(user_id='u2', ifaces=['foo'])\n \n-        self.connection.send.has_been_called_with(\n-            ''\n-            'foo'\n-            ''\n-        )\n+        self.connection.send.has_been_called_with('')\n \n-        self.gmp.modify_user(user_id='u1', ifaces=['foo', 'bar'])\n+        self.gmp.modify_user(user_id='u3', ifaces=['foo', 'bar'])\n \n-        self.connection.send.has_been_called_with(\n-            ''\n-            'foo,bar'\n-            ''\n-        )\n+        self.connection.send.has_been_called_with('')\n \n         self.gmp.modify_user(\n-            user_id='u1', ifaces=['foo', 'bar'], ifaces_allow=False\n+            user_id='u4', ifaces=['foo', 'bar'], ifaces_allow=False\n         )\n \n-        self.connection.send.has_been_called_with(\n-            ''\n-            'foo,bar'\n-            ''\n-        )\n+        self.connection.send.has_been_called_with('')\n \n         self.gmp.modify_user(\n-            user_id='u1', ifaces=['foo', 'bar'], ifaces_allow=True\n-        )\n-\n-        self.connection.send.has_been_called_with(\n-            ''\n-            'foo,bar'\n-            ''\n-        )\n+            user_id='u5', ifaces=['foo', 'bar'], ifaces_allow=True\n+        )\n+\n+        
self.connection.send.has_been_called_with('')\n+\n+        # pylint: disable=line-too-long\n+        deprecation_calls = [\n+            call('The ifaces parameter has been removed in GMP version 2110'),\n+            call('The ifaces parameter has been removed in GMP version 2110'),\n+            call('The ifaces parameter has been removed in GMP version 2110'),\n+            call(\n+                'The ifaces_allow parameter has been removed in GMP version 2110'\n+            ),\n+            call('The ifaces parameter has been removed in GMP version 2110'),\n+            call(\n+                'The ifaces_allow parameter has been removed in GMP version 2110'\n+            ),\n+        ]\n+        # pylint: enable=line-too-long\n+        deprecation_mock.assert_has_calls(deprecation_calls)"
    }
]
pontos-25.3.2/tests/github/api/release-response.json000066400000000000000000000127701476255566300225230ustar00rootroot00000000000000{
  "url": "https://api.github.com/repos/greenbone/pontos/releases/52499047",
  "assets_url": "https://api.github.com/repos/greenbone/pontos/releases/52499047/assets",
  "upload_url": "https://uploads.github.com/repos/greenbone/pontos/releases/52499047/assets{?name,label}",
  "html_url": "https://github.com/greenbone/pontos/releases/tag/v21.11.0",
  "id": 52499047,
  "author": {
    "login": "greenbonebot",
    "id": 85254666,
    "node_id": "MDQ6VXNlcjg1MjU0NjY2",
    "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
    "gravatar_id": "",
    "url": "https://api.github.com/users/greenbonebot",
    "html_url": "https://github.com/greenbonebot",
    "followers_url": "https://api.github.com/users/greenbonebot/followers",
    "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
    "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
    "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
    "repos_url": "https://api.github.com/users/greenbonebot/repos",
    "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
    "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
    "type": "User",
    "site_admin": false
  },
  "node_id": "RE_kwDODyT54s4DIRJn",
  "tag_name": "v21.11.0",
  "target_commitish": "main",
  "name": "pontos 21.11.0",
  "draft": false,
  "prerelease": false,
  "created_at": "2021-11-02T08:21:21Z",
  "published_at": "2021-11-02T08:21:24Z",
  "assets": [
    {
      "url": "https://api.github.com/repos/greenbone/pontos/releases/assets/48415828",
      "id": 48415828,
      "node_id": "RA_kwDODyT54s4C4sRU",
      "name": "pontos-21.11.0.tar.gz.asc",
      "label": "",
      "uploader": {
        "login": "greenbonebot",
        "id": 85254666,
        "node_id": "MDQ6VXNlcjg1MjU0NjY2",
        "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/greenbonebot",
        "html_url": "https://github.com/greenbonebot",
        "followers_url": "https://api.github.com/users/greenbonebot/followers",
        "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
        "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
        "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
        "repos_url": "https://api.github.com/users/greenbonebot/repos",
        "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
        "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
        "type": "User",
        "site_admin": false
      },
      "content_type": "application/octet-stream",
      "state": "uploaded",
      "size": 833,
      "download_count": 1,
      "created_at": "2021-11-02T08:21:31Z",
      "updated_at": "2021-11-02T08:21:31Z",
      "browser_download_url": "https://github.com/greenbone/pontos/releases/download/v21.11.0/pontos-21.11.0.tar.gz.asc"
    },
    {
      "url": "https://api.github.com/repos/greenbone/pontos/releases/assets/48415827",
      "id": 48415827,
      "node_id": "RA_kwDODyT54s4C4sRT",
      "name": "pontos-21.11.0.zip.asc",
      "label": "",
      "uploader": {
        "login": "greenbonebot",
        "id": 85254666,
        "node_id": "MDQ6VXNlcjg1MjU0NjY2",
        "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/greenbonebot",
        "html_url": "https://github.com/greenbonebot",
        "followers_url": "https://api.github.com/users/greenbonebot/followers",
        "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
        "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
        "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
        "repos_url": "https://api.github.com/users/greenbonebot/repos",
        "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
        "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
        "type": "User",
        "site_admin": false
      },
      "content_type": "application/octet-stream",
      "state": "uploaded",
      "size": 833,
      "download_count": 1,
      "created_at": "2021-11-02T08:21:31Z",
      "updated_at": "2021-11-02T08:21:31Z",
      "browser_download_url": "https://github.com/greenbone/pontos/releases/download/v21.11.0/pontos-21.11.0.zip.asc"
    }
  ],
  "tarball_url": "https://api.github.com/repos/greenbone/pontos/tarball/v21.11.0",
  "zipball_url": "https://api.github.com/repos/greenbone/pontos/zipball/v21.11.0",
  "body": "## [21.11.0] - 2021-11-02\r\n\r\n## Bug Fixes\r\n* Using the regex like this [:-|] is incorrect, because of the special purpose of - in a set. Use [:|-] instead [50479dc](https://github.com/greenbone/pontos/commit/50479dc)\r\n\r\n[21.11.0]: https://github.com/greenbone/pontos/compare/v21.10.2...v21.11.0"
}
pontos-25.3.2/tests/github/api/test_artifacts.py000066400000000000000000000263011476255566300217400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=redefined-builtin, line-too-long

from pathlib import Path
from unittest.mock import MagicMock

import httpx

from pontos.github.api.artifacts import GitHubAsyncRESTArtifacts
from tests import AsyncIteratorMock, AsyncMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

here = Path(__file__).parent


class GitHubAsyncRESTArtifactsTestCase(GitHubAsyncRESTTestCase):
    api_cls = GitHubAsyncRESTArtifacts

    async def test_get(self):
        """A single artifact is fetched by id and deserialized."""
        payload = {
            "id": 1,
            "node_id": "MDg6QXJ0aWZhY3QxMQ==",
            "name": "Rails",
            "size_in_bytes": 556,
            "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11",
            "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11/zip",
            "expired": False,
            "created_at": "2020-01-10T14:59:22Z",
            "expires_at": "2020-03-21T14:59:22Z",
            "updated_at": "2020-02-21T14:59:22Z",
            "workflow_run": {
                "id": 2332938,
                "repository_id": 1296269,
                "head_repository_id": 1296269,
                "head_branch": "main",
                "head_sha": "328faa0536e6fef19753d9d91dc96a9931694ce3",
            },
        }
        response = create_response()
        response.json.return_value = payload
        self.client.get.return_value = response

        artifact = await self.api.get("foo/bar", "123")

        # The artifact id must be embedded in the request path.
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/actions/artifacts/123"
        )
        self.assertEqual(artifact.id, 1)

    async def test_get_failure(self):
        """HTTP status errors raised by the client propagate unchanged."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.get.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.get("foo/bar", "123")

        # The request must still have been attempted exactly once.
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/actions/artifacts/123"
        )

    async def test_get_all(self):
        """All artifacts of a repository are yielded across paginated pages."""
        # First page contains a single artifact (id 1).
        response1 = create_response()
        response1.json.return_value = {
            "artifacts": [
                {
                    "id": 1,
                    "node_id": "MDg6QXJ0aWZhY3QxMQ==",
                    "name": "Rails",
                    "size_in_bytes": 556,
                    "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11",
                    "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11/zip",
                    "expired": False,
                    "created_at": "2020-01-10T14:59:22Z",
                    "expires_at": "2020-03-21T14:59:22Z",
                    "updated_at": "2020-02-21T14:59:22Z",
                    "workflow_run": {
                        "id": 2332938,
                        "repository_id": 1296269,
                        "head_repository_id": 1296269,
                        "head_branch": "main",
                        "head_sha": "328faa0536e6fef19753d9d91dc96a9931694ce3",
                    },
                }
            ]
        }
        # Second page contains two more artifacts (ids 2 and 3).
        response2 = create_response()
        response2.json.return_value = {
            "artifacts": [
                {
                    "id": 2,
                    "node_id": "MDg6QXJ0aWZhY3QxMw==",
                    "name": "Test output",
                    "size_in_bytes": 453,
                    "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/2",
                    "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/2/zip",
                    "expired": False,
                    "created_at": "2020-01-10T14:59:22Z",
                    "expires_at": "2020-03-21T14:59:22Z",
                    "updated_at": "2020-02-21T14:59:22Z",
                    "workflow_run": {
                        "id": 2332942,
                        "repository_id": 1296269,
                        "head_repository_id": 1296269,
                        "head_branch": "main",
                        "head_sha": "178f4f6090b3fccad4a65b3e83d076a622d59652",
                    },
                },
                {
                    "id": 3,
                    "node_id": "MDg6QXJ0aWZhY3QxMw==",
                    "name": "Test output",
                    "size_in_bytes": 123,
                    "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/3",
                    "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/3/zip",
                    "expired": False,
                    "created_at": "2020-01-10T14:59:22Z",
                    "expires_at": "2020-03-21T14:59:22Z",
                    "updated_at": "2020-02-21T14:59:22Z",
                    "workflow_run": {
                        "id": 2332942,
                        "repository_id": 1296269,
                        "head_repository_id": 1296269,
                        "head_branch": "main",
                        "head_sha": "178f4f6090b3fccad4a65b3e83d076a622d59652",
                    },
                },
            ]
        }

        # get_all returns an async iterator over paginated responses.
        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.get_all("foo/bar"))
        artifact = await anext(async_it)
        self.assertEqual(artifact.id, 1)
        artifact = await anext(async_it)
        self.assertEqual(artifact.id, 2)
        artifact = await anext(async_it)
        self.assertEqual(artifact.id, 3)

        # Iterator must be exhausted after the three artifacts.
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        # A single request with maximum page size is expected.
        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/actions/artifacts",
            params={"per_page": "100"},
        )

    async def test_get_workflow_run_artifacts(self):
        """Artifacts of a single workflow run are collected across all
        paginated API responses and yielded lazily in order."""
        # First page: one artifact.
        response1 = create_response()
        response1.json.return_value = {
            "artifacts": [
                {
                    "id": 1,
                    "node_id": "MDg6QXJ0aWZhY3QxMQ==",
                    "name": "Rails",
                    "size_in_bytes": 556,
                    "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/1",
                    "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/1/zip",
                    "expired": False,
                    "created_at": "2020-01-10T14:59:22Z",
                    "expires_at": "2020-03-21T14:59:22Z",
                    "updated_at": "2020-02-21T14:59:22Z",
                    "workflow_run": {
                        "id": 2332938,
                        "repository_id": 1296269,
                        "head_repository_id": 1296269,
                        "head_branch": "main",
                        "head_sha": "328faa0536e6fef19753d9d91dc96a9931694ce3",
                    },
                }
            ]
        }
        # Second page: two more artifacts.
        response2 = create_response()
        response2.json.return_value = {
            "artifacts": [
                {
                    "id": 2,
                    "node_id": "MDg6QXJ0aWZhY3QxMQ==",
                    "name": "Rails",
                    "size_in_bytes": 556,
                    "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/2",
                    "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/2/zip",
                    "expired": False,
                    "created_at": "2020-01-10T14:59:22Z",
                    "expires_at": "2020-03-21T14:59:22Z",
                    "updated_at": "2020-02-21T14:59:22Z",
                    "workflow_run": {
                        "id": 2332938,
                        "repository_id": 1296269,
                        "head_repository_id": 1296269,
                        "head_branch": "main",
                        "head_sha": "328faa0536e6fef19753d9d91dc96a9931694ce3",
                    },
                },
                {
                    "id": 3,
                    "node_id": "MDg6QXJ0aWZhY3QxMw==",
                    "name": "Test output",
                    "size_in_bytes": 453,
                    "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/3",
                    "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/3/zip",
                    "expired": False,
                    "created_at": "2020-01-10T14:59:22Z",
                    "expires_at": "2020-03-21T14:59:22Z",
                    "updated_at": "2020-02-21T14:59:22Z",
                    "workflow_run": {
                        "id": 2332942,
                        "repository_id": 1296269,
                        "head_repository_id": 1296269,
                        "head_branch": "main",
                        "head_sha": "178f4f6090b3fccad4a65b3e83d076a622d59652",
                    },
                },
            ]
        }

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        # All three artifacts are yielded in page order, then the
        # iterator is exhausted.
        async_it = aiter(self.api.get_workflow_run_artifacts("foo/bar", "123"))
        artifact = await anext(async_it)
        self.assertEqual(artifact.id, 1)
        artifact = await anext(async_it)
        self.assertEqual(artifact.id, 2)
        artifact = await anext(async_it)
        self.assertEqual(artifact.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        # per_page is maximized to keep the number of requests low
        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/actions/runs/123/artifacts",
            params={"per_page": "100"},
        )

    async def test_delete(self):
        """Deleting an artifact issues a DELETE on the artifact endpoint."""
        self.client.delete.return_value = create_response()

        await self.api.delete("foo/bar", "123")

        expected_url = "/repos/foo/bar/actions/artifacts/123"
        self.client.delete.assert_awaited_once_with(expected_url)

    async def test_delete_failure(self):
        """An HTTP error raised by the client propagates to the caller."""
        failure_response = create_response()
        self.client.delete.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=failure_response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.delete("foo/bar", "123")

        expected_url = "/repos/foo/bar/actions/artifacts/123"
        self.client.delete.assert_awaited_once_with(expected_url)

    async def test_download(self):
        """Downloading an artifact streams its chunks and reports the
        download progress as a percentage of the advertised length."""
        # stream response advertising a total length of 2 and yielding
        # two one-byte chunks
        stream_response = create_response(headers=MagicMock())
        stream_response.headers.get.return_value = 2
        stream_response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        context = AsyncMock()
        context.__aenter__.return_value = stream_response
        self.client.stream.return_value = context

        async with self.api.download("foo/bar", 123) as download_iterable:
            chunks = aiter(download_iterable)

            chunk, progress = await anext(chunks)
            self.assertEqual(chunk, "1")
            self.assertEqual(progress, 50)

            chunk, progress = await anext(chunks)
            self.assertEqual(chunk, "2")
            self.assertEqual(progress, 100)

        self.client.stream.assert_called_once_with(
            "/repos/foo/bar/actions/artifacts/123/zip"
        )
pontos-25.3.2/tests/github/api/test_billing.py000066400000000000000000000040621476255566300214000ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

from pontos.github.api.billing import GitHubAsyncRESTBilling
from tests.github.api import GitHubAsyncRESTTestCase, create_response


class GitHubAsyncRESTBillingTestCase(GitHubAsyncRESTTestCase):
    """Tests for the GitHub organization billing REST API wrapper."""

    api_cls = GitHubAsyncRESTBilling

    async def test_actions(self):
        """Actions billing is fetched from the org billing/actions endpoint."""
        mock_response = create_response()
        mock_response.json.return_value = {
            "total_minutes_used": 305,
            "total_paid_minutes_used": 0,
            "included_minutes": 3000,
            "minutes_used_breakdown": {
                "UBUNTU": 205,
                "MACOS": 10,
                "WINDOWS": 90,
            },
        }
        self.client.get.return_value = mock_response

        usage = await self.api.actions("foo")

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/settings/billing/actions",
        )
        self.assertEqual(usage.total_minutes_used, 305)

    async def test_packages(self):
        """Packages billing is fetched from the org billing/packages endpoint."""
        mock_response = create_response()
        mock_response.json.return_value = {
            "total_gigabytes_bandwidth_used": 50,
            "total_paid_gigabytes_bandwidth_used": 40,
            "included_gigabytes_bandwidth": 10,
        }
        self.client.get.return_value = mock_response

        usage = await self.api.packages("foo")

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/settings/billing/packages",
        )
        self.assertEqual(usage.total_gigabytes_bandwidth_used, 50)

    async def test_storage(self):
        """Storage billing is fetched from the shared-storage endpoint."""
        mock_response = create_response()
        mock_response.json.return_value = {
            "days_left_in_billing_cycle": 20,
            "estimated_paid_storage_for_month": 15,
            "estimated_storage_for_month": 40,
        }
        self.client.get.return_value = mock_response

        usage = await self.api.storage("foo")

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/settings/billing/shared-storage",
        )
        self.assertEqual(usage.days_left_in_billing_cycle, 20)
pontos-25.3.2/tests/github/api/test_branch.py000066400000000000000000000460251476255566300212220ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long

import unittest
from pathlib import Path
from unittest.mock import MagicMock

from httpx import HTTPStatusError

from pontos.github.api.branch import (
    GitHubAsyncRESTBranches,
    update_from_applied_settings,
)
from pontos.github.models.branch import BranchProtection
from tests.github.api import GitHubAsyncRESTTestCase, create_response

here = Path(__file__).parent


class UpdateFromAppliedSettingsTestCase(unittest.TestCase):
    """Tests for the update_from_applied_settings() helper."""

    def test_update_from_applied_settings(self):
        """Explicitly passed keyword arguments override the currently
        applied branch protection settings; everything else falls back
        to the state reported by the API."""
        # Branch protection as currently applied on the repository,
        # deserialized from a (shortened) GitHub API response.
        branch_protection = BranchProtection.from_dict(
            {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions",
                "users_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/users",
                "teams_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/teams",
                "apps_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/apps",
                "users": [
                    {
                        "login": "greenbonebot",
                        "id": 123,
                        "node_id": "MDQ6VXNlcjg1MjU0NjY2",
                        "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/greenbonebot",
                        "html_url": "https://github.com/greenbonebot",
                        "followers_url": "https://api.github.com/users/greenbonebot/followers",
                        "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
                        "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
                        "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
                        "repos_url": "https://api.github.com/users/greenbonebot/repos",
                        "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
                        "type": "User",
                        "site_admin": False,
                    }
                ],
                "teams": [],
                "apps": [],
                "lock_branch": {"enabled": False},
                "allow_fork_syncing": {"enabled": False},
                "required_signatures": {"enabled": False},
            }
        )

        kwargs = update_from_applied_settings(
            branch_protection=branch_protection,
            lock_branch=True,
            allow_fork_syncing=True,
            allow_deletions=True,
            allow_force_pushes=True,
        )

        # explicitly passed values win over the applied settings
        self.assertTrue(kwargs["lock_branch"])
        self.assertTrue(kwargs["allow_fork_syncing"])
        self.assertTrue(kwargs["allow_deletions"])
        self.assertTrue(kwargs["allow_force_pushes"])

        # applied as disabled and not overridden -> stays False
        self.assertFalse(kwargs["required_signatures"])

        # neither applied nor overridden -> None
        self.assertIsNone(kwargs["required_linear_history"])
        self.assertIsNone(kwargs["block_creations"])
        self.assertIsNone(kwargs["required_conversation_resolution"])
        self.assertIsNone(kwargs["enforce_admins"])


class GitHubAsyncRESTBranchesTestCase(GitHubAsyncRESTTestCase):
    """Tests for the GitHub branches/branch-protection REST API wrapper."""

    api_cls = GitHubAsyncRESTBranches

    async def test_exists(self):
        """A successful GET on the branch endpoint reports existence."""
        response = create_response(is_success=True)
        self.client.get.return_value = response

        self.assertTrue(await self.api.exists("foo/bar", "baz"))
        self.client.get.assert_awaited_once_with("/repos/foo/bar/branches/baz")

    async def test_not_exists(self):
        """A failed GET on the branch endpoint reports non-existence."""
        response = create_response(is_success=False)
        self.client.get.return_value = response

        self.assertFalse(await self.api.exists("foo/bar", "baz"))
        self.client.get.assert_awaited_once_with("/repos/foo/bar/branches/baz")

    async def test_delete_branch(self):
        """Deleting a branch issues a DELETE on the git refs endpoint."""
        response = create_response()
        self.client.delete.return_value = response

        await self.api.delete("foo/bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/git/refs/baz"
        )

    async def test_delete_branch_failure(self):
        """An HTTP error while deleting a branch propagates to the caller."""
        response = create_response()
        error = HTTPStatusError("404", request=MagicMock(), response=response)
        response.raise_for_status.side_effect = error

        self.client.delete.return_value = response

        with self.assertRaises(HTTPStatusError):
            await self.api.delete("foo/bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/git/refs/baz"
        )

    async def test_protection_rules(self):
        """Protection rules are fetched and deserialized from the API."""
        rules = {
            "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
            "required_signatures": {
                "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection/required_signatures",
                "enabled": False,
            },
            "enforce_admins": {
                "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection/enforce_admins",
                "enabled": False,
            },
        }
        response = create_response()
        response.json.return_value = rules

        self.client.get.return_value = response

        data = await self.api.protection_rules("foo/bar", "baz")

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection"
        )
        self.assertEqual(
            data.url,
            "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        )

    async def test_protection_rules_failure(self):
        """An HTTP error while fetching protection rules propagates."""
        response = create_response()
        error = HTTPStatusError("404", request=MagicMock(), response=response)
        response.raise_for_status.side_effect = error

        self.client.get.return_value = response

        with self.assertRaises(HTTPStatusError):
            await self.api.protection_rules("foo/bar", "baz")

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection"
        )

    async def test_delete_protection_rules(self):
        """Protection rules are removed via DELETE on the protection endpoint."""
        response = create_response()
        self.client.delete.return_value = response

        await self.api.delete_protection_rules("foo/bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection"
        )

    async def test_delete_protection_rules_failure(self):
        """An HTTP error while deleting protection rules propagates."""
        response = create_response()
        error = HTTPStatusError("404", request=MagicMock(), response=response)
        response.raise_for_status.side_effect = error

        self.client.delete.return_value = response

        with self.assertRaises(HTTPStatusError):
            await self.api.delete_protection_rules("foo/bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection"
        )

    async def test_update_protection_rules_defaults(self):
        """Without arguments only the mandatory None placeholders are sent."""
        response = create_response()
        response.json.return_value = {
            "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        }
        self.client.put.return_value = response

        rules = await self.api.update_protection_rules(
            "foo/bar",
            "baz",
        )
        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection",
            data={
                "required_status_checks": None,
                "enforce_admins": None,
                "required_pull_request_reviews": None,
                "restrictions": None,
            },
        )

        self.assertEqual(
            rules.url,
            "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        )

    async def test_update_protection_rules(self):
        """All supported keyword arguments are mapped into the PUT payload."""
        response = create_response()
        response.json.return_value = {
            "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        }
        self.client.put.return_value = response

        rules = await self.api.update_protection_rules(
            "foo/bar",
            "baz",
            required_status_checks=[("foo", "123"), ("bar", None)],
            require_branches_to_be_up_to_date=True,
            enforce_admins=True,
            dismissal_restrictions_users=("foo", "bar"),
            dismissal_restrictions_teams=("team_foo", "team_bar"),
            dismissal_restrictions_apps=("123", "321"),
            dismiss_stale_reviews=True,
            require_code_owner_reviews=True,
            required_approving_review_count=2,
            require_last_push_approval=True,
            bypass_pull_request_allowances_users=("foo", "bar"),
            bypass_pull_request_allowances_teams=("team_foo", "team_bar"),
            bypass_pull_request_allowances_apps=("123", "321"),
            restrictions_users=("foo", "bar"),
            restrictions_teams=("team_foo", "team_bar"),
            restrictions_apps=("123", "321"),
            required_linear_history=True,
            allow_force_pushes=True,
            allow_deletions=True,
            block_creations=True,
            required_conversation_resolution=True,
            lock_branch=True,
            allow_fork_syncing=True,
        )

        # a check tuple with app id None must serialize without "app_id"
        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection",
            data={
                "required_status_checks": {
                    "strict": True,
                    "checks": [
                        {"context": "foo", "app_id": "123"},
                        {"context": "bar"},
                    ],
                },
                "enforce_admins": True,
                "required_pull_request_reviews": {
                    "dismissal_restrictions": {
                        "users": ["foo", "bar"],
                        "teams": ["team_foo", "team_bar"],
                        "apps": ["123", "321"],
                    },
                    "dismiss_stale_reviews": True,
                    "require_code_owner_reviews": True,
                    "required_approving_review_count": 2,
                    "require_last_push_approval": True,
                    "bypass_pull_request_allowances": {
                        "users": ["foo", "bar"],
                        "teams": ["team_foo", "team_bar"],
                        "apps": ["123", "321"],
                    },
                },
                "restrictions": {
                    "users": ["foo", "bar"],
                    "teams": ["team_foo", "team_bar"],
                    "apps": ["123", "321"],
                },
                "required_linear_history": True,
                "allow_force_pushes": True,
                "allow_deletions": True,
                "block_creations": True,
                "required_conversation_resolution": True,
                "lock_branch": True,
                "allow_fork_syncing": True,
            },
        )

        self.assertEqual(
            rules.url,
            "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        )

    async def test_update_protection_defaults(self):
        """Same default payload as above.

        NOTE(review): this duplicates test_update_protection_rules_defaults
        — consider consolidating the two tests.
        """
        response = create_response()
        response.json.return_value = {
            "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        }
        self.client.put.return_value = response

        rules = await self.api.update_protection_rules(
            "foo/bar",
            "baz",
        )

        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection",
            data={
                "required_status_checks": None,
                "enforce_admins": None,
                "required_pull_request_reviews": None,
                "restrictions": None,
            },
        )

        self.assertEqual(
            rules.url,
            "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        )

    async def test_update_protection_rules_up_to_date_branch(self):
        """require_branches_to_be_up_to_date alone yields strict checks
        with an empty checks list."""
        response = create_response()
        response.json.return_value = {
            "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        }
        self.client.put.return_value = response

        rules = await self.api.update_protection_rules(
            "foo/bar",
            "baz",
            require_branches_to_be_up_to_date=True,
        )

        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection",
            data={
                "required_status_checks": {
                    "strict": True,
                    "checks": [],
                },
                "enforce_admins": None,
                "required_pull_request_reviews": None,
                "restrictions": None,
            },
        )

        self.assertEqual(
            rules.url,
            "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        )

    async def test_update_protection_rules_restriction_users(self):
        """Restricting to users implies an empty teams list in the payload."""
        response = create_response()
        response.json.return_value = {
            "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        }
        self.client.put.return_value = response

        rules = await self.api.update_protection_rules(
            "foo/bar", "baz", restrictions_users=["foo", "bar"]
        )

        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection",
            data={
                "enforce_admins": None,
                "required_pull_request_reviews": None,
                "required_status_checks": None,
                "restrictions": {"users": ["foo", "bar"], "teams": []},
            },
        )

        self.assertEqual(
            rules.url,
            "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        )

    async def test_update_protection_rules_restriction_teams(self):
        """Restricting to teams implies an empty users list in the payload."""
        response = create_response()
        response.json.return_value = {
            "url": "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        }
        self.client.put.return_value = response

        rules = await self.api.update_protection_rules(
            "foo/bar", "baz", restrictions_teams=["foo", "bar"]
        )

        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection",
            data={
                "enforce_admins": None,
                "required_pull_request_reviews": None,
                "required_status_checks": None,
                "restrictions": {"teams": ["foo", "bar"], "users": []},
            },
        )

        self.assertEqual(
            rules.url,
            "https://api.github.com/repos/octocat/Hello-World/branches/main/protection",
        )

    async def test_update_protection_rules_failure(self):
        """An HTTP error while updating protection rules propagates."""
        response = create_response()
        error = HTTPStatusError("404", request=MagicMock(), response=response)
        response.raise_for_status.side_effect = error

        self.client.put.return_value = response

        with self.assertRaises(HTTPStatusError):
            await self.api.update_protection_rules(
                "foo/bar",
                "baz",
                enforce_admins=True,
            )

        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection",
            data={
                "enforce_admins": True,
                "required_status_checks": None,
                "required_pull_request_reviews": None,
                "restrictions": None,
            },
        )

    async def test_enable_enforce_admins(self):
        """Enabling admin enforcement uses POST on the enforce_admins endpoint."""
        response = create_response()
        self.client.post.return_value = response

        await self.api.set_enforce_admins("foo/bar", "baz", enforce_admins=True)

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection/enforce_admins"
        )

    async def test_disable_enforce_admins(self):
        """Disabling admin enforcement uses DELETE on the same endpoint."""
        response = create_response()
        self.client.delete.return_value = response

        await self.api.set_enforce_admins(
            "foo/bar", "baz", enforce_admins=False
        )

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection/enforce_admins"
        )

    async def test_enable_required_signatures(self):
        """Enabling required signatures uses POST on the endpoint."""
        response = create_response()
        self.client.post.return_value = response

        await self.api.set_required_signatures(
            "foo/bar", "baz", required_signatures=True
        )

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection/required_signatures"
        )

    async def test_disable_required_signatures(self):
        """Disabling required signatures uses DELETE on the endpoint."""
        response = create_response()
        self.client.delete.return_value = response

        await self.api.set_required_signatures(
            "foo/bar", "baz", required_signatures=False
        )

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection/required_signatures"
        )

    async def test_update_required_status_checks(self):
        """Status checks are PATCHed; a None app id omits the app_id key."""
        response = create_response()
        self.client.patch.return_value = response

        await self.api.update_required_status_checks(
            "foo/bar",
            "baz",
            required_status_checks=[("foo", "123"), ("bar", None)],
            require_branches_to_be_up_to_date=True,
        )

        self.client.patch.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection/required_status_checks",
            data={
                "strict": True,
                "checks": [
                    {"context": "foo", "app_id": "123"},
                    {"context": "bar"},
                ],
            },
        )

    async def test_remove_required_required_status_checks(self):
        """Removing status checks issues a DELETE on the endpoint."""
        response = create_response()
        self.client.delete.return_value = response

        await self.api.remove_required_status_checks("foo/bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection/required_status_checks"
        )

    async def test_remove_required_required_status_checks_failure(self):
        """An HTTP error while removing status checks propagates."""
        response = create_response()
        error = HTTPStatusError("404", request=MagicMock(), response=response)
        response.raise_for_status.side_effect = error

        self.client.delete.return_value = response

        with self.assertRaises(HTTPStatusError):
            await self.api.remove_required_status_checks("foo/bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/branches/baz/protection/required_status_checks"
        )
pontos-25.3.2/tests/github/api/test_client.py000066400000000000000000000214501476255566300212360ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=arguments-differ,redefined-builtin

from unittest.mock import MagicMock, call, patch

from pontos.github.api.client import (
    DEFAULT_ACCEPT_HEADER,
    GITHUB_API_VERSION,
    GitHubAsyncRESTClient,
)
from pontos.github.api.helper import DEFAULT_GITHUB_API_URL
from tests import AsyncMock, IsolatedAsyncioTestCase, aiter, anext


class GitHubAsyncRESTClientTestCase(IsolatedAsyncioTestCase):
    @patch("pontos.github.api.client.httpx.AsyncClient")
    def setUp(self, async_client: MagicMock) -> None:
        """Replace httpx.AsyncClient with a mock so no real HTTP traffic
        happens; the patched class is injected by the decorator."""
        self.http_client = AsyncMock()
        # the client under test instantiates httpx.AsyncClient internally
        # and therefore receives our AsyncMock instance
        async_client.return_value = self.http_client
        self.client = GitHubAsyncRESTClient("token")

    async def test_get(self):
        """A GET on a path is resolved against the default API URL."""
        await self.client.get("/foo/bar")

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.get.assert_awaited_once_with(
            f"{DEFAULT_GITHUB_API_URL}/foo/bar",
            headers=expected_headers,
            params=None,
            follow_redirects=True,
        )

    async def test_get_url(self):
        """A GET on an absolute URL is passed through unchanged."""
        await self.client.get("https://github.com/foo/bar")

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.get.assert_awaited_once_with(
            "https://github.com/foo/bar",
            headers=expected_headers,
            params=None,
            follow_redirects=True,
        )

    async def test_get_all(self):
        """get_all follows the pagination "next" link until exhausted."""
        next_url = "https://foo.bar"
        page_one = MagicMock(links={"next": {"url": next_url}})
        page_two = MagicMock(links=None)
        self.http_client.get.side_effect = [page_one, page_two]

        it = aiter(self.client.get_all("/foo/bar"))

        await anext(it)
        await anext(it)

        with self.assertRaises(StopAsyncIteration):
            await anext(it)

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.get.assert_has_awaits(
            [
                call(
                    f"{DEFAULT_GITHUB_API_URL}/foo/bar",
                    headers=expected_headers,
                    params=None,
                    follow_redirects=True,
                ),
                call(
                    next_url,
                    headers=expected_headers,
                    params=None,
                    follow_redirects=True,
                ),
            ]
        )

    async def test_delete(self):
        """A DELETE on a path is resolved against the default API URL."""
        await self.client.delete("/foo/bar")

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.delete.assert_awaited_once_with(
            f"{DEFAULT_GITHUB_API_URL}/foo/bar",
            headers=expected_headers,
            params=None,
        )

    async def test_delete_url(self):
        """A DELETE on an absolute URL is passed through unchanged."""
        await self.client.delete("https://github.com/foo/bar")

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.delete.assert_awaited_once_with(
            "https://github.com/foo/bar",
            headers=expected_headers,
            params=None,
        )

    async def test_post(self):
        """A POST sends its data as JSON to the resolved API URL."""
        await self.client.post("/foo/bar", data={"foo": "bar"})

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.post.assert_awaited_once_with(
            f"{DEFAULT_GITHUB_API_URL}/foo/bar",
            headers=expected_headers,
            json={"foo": "bar"},
            params=None,
            content=None,
        )

    async def test_post_url(self):
        """A POST on an absolute URL is passed through unchanged."""
        await self.client.post(
            "https://github.com/foo/bar", data={"foo": "bar"}
        )

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.post.assert_awaited_once_with(
            "https://github.com/foo/bar",
            headers=expected_headers,
            json={"foo": "bar"},
            params=None,
            content=None,
        )

    async def test_post_with_content_length(self):
        """An explicit content length is forwarded as a string header."""
        await self.client.post(
            "/foo/bar", data={"foo": "bar"}, content_length=123
        )

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
            "Content-Length": "123",
        }
        self.http_client.post.assert_awaited_once_with(
            f"{DEFAULT_GITHUB_API_URL}/foo/bar",
            headers=expected_headers,
            json={"foo": "bar"},
            params=None,
            content=None,
        )

    async def test_put(self):
        """A PUT sends its data as JSON to the resolved API URL."""
        await self.client.put("/foo/bar", data={"foo": "bar"})

        expected_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.put.assert_awaited_once_with(
            f"{DEFAULT_GITHUB_API_URL}/foo/bar",
            headers=expected_headers,
            json={"foo": "bar"},
            params=None,
            content=None,
        )

    async def test_put_url(self):
        """PUT with a full URL bypasses the API base URL resolution."""
        await self.client.put("https://github.com/foo/bar", data={"foo": "bar"})

        auth_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.put.assert_awaited_once_with(
            "https://github.com/foo/bar",
            headers=auth_headers,
            json={"foo": "bar"},
            params=None,
            content=None,
        )

    async def test_patch(self):
        """PATCH with a path resolves against the API base URL."""
        await self.client.patch("/foo/bar", data={"foo": "bar"})

        auth_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.patch.assert_awaited_once_with(
            f"{DEFAULT_GITHUB_API_URL}/foo/bar",
            headers=auth_headers,
            json={"foo": "bar"},
            params=None,
            content=None,
        )

    async def test_patch_url(self):
        """PATCH with a full URL bypasses the API base URL resolution."""
        await self.client.patch(
            "https://github.com/foo/bar", data={"foo": "bar"}
        )

        auth_headers = {
            "Accept": DEFAULT_ACCEPT_HEADER,
            "Authorization": "token token",
            "X-GitHub-Api-Version": GITHUB_API_VERSION,
        }
        self.http_client.patch.assert_awaited_once_with(
            "https://github.com/foo/bar",
            headers=auth_headers,
            json={"foo": "bar"},
            params=None,
            content=None,
        )

    async def test_stream(self):
        """Streaming a path issues a redirected GET against the API base URL."""
        # stream() must return an async context manager, so replace it with
        # a plain MagicMock whose __aenter__ yields a mock response.
        stream_cm = MagicMock()
        stream_cm.__aenter__.return_value = MagicMock()
        self.http_client.stream = MagicMock(return_value=stream_cm)

        async with self.client.stream("/foo/bar"):
            pass

        self.http_client.stream.assert_called_once_with(
            "GET",
            f"{DEFAULT_GITHUB_API_URL}/foo/bar",
            headers={
                "Accept": DEFAULT_ACCEPT_HEADER,
                "Authorization": "token token",
                "X-GitHub-Api-Version": GITHUB_API_VERSION,
            },
            follow_redirects=True,
        )

    async def test_stream_url(self):
        """Streaming a full URL forwards it to the http client unchanged."""
        # stream() must return an async context manager, so replace it with
        # a plain MagicMock whose __aenter__ yields a mock response.
        stream_cm = MagicMock()
        stream_cm.__aenter__.return_value = MagicMock()
        self.http_client.stream = MagicMock(return_value=stream_cm)

        async with self.client.stream("https://github.com/foo/bar"):
            pass

        self.http_client.stream.assert_called_once_with(
            "GET",
            "https://github.com/foo/bar",
            headers={
                "Accept": DEFAULT_ACCEPT_HEADER,
                "Authorization": "token token",
                "X-GitHub-Api-Version": GITHUB_API_VERSION,
            },
            follow_redirects=True,
        )

    async def test_context_manager(self):
        """Entering/exiting the client delegates to the wrapped http client."""
        async with self.client:
            pass

        # Both halves of the async context manager protocol must have been
        # awaited exactly once on the underlying http client.
        for dunder in ("__aenter__", "__aexit__"):
            getattr(self.http_client, dunder).assert_awaited_once()
pontos-25.3.2/tests/github/api/test_code_scanning.py000066400000000000000000001636221476255566300225620ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

import json

from pontos.github.api.code_scanning import GitHubAsyncRESTCodeScanning
from pontos.github.models.base import SortOrder
from pontos.github.models.code_scanning import (
    AlertSort,
    AlertState,
    DefaultSetupState,
    DismissedReason,
    Language,
    QuerySuite,
    SarifProcessingStatus,
    Severity,
)
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

# Sample payload mirroring GitHub's "list code scanning alerts" REST response.
# The first entry is an open alert (number 4), the second a dismissed one
# (number 3); both are used as the mocked JSON body in the listing tests below.
ALERTS = [
    {
        "number": 4,
        "created_at": "2020-02-13T12:29:18Z",
        "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/4",
        "html_url": "https://github.com/octocat/hello-world/code-scanning/4",
        "state": "open",
        "dismissed_by": None,
        "dismissed_at": None,
        "dismissed_reason": None,
        "dismissed_comment": None,
        "rule": {
            "id": "js/zipslip",
            "severity": "error",
            "tags": ["security", "external/cwe/cwe-022"],
            "description": "Arbitrary file write during zip extraction",
            "name": "js/zipslip",
        },
        "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
        "most_recent_instance": {
            "ref": "refs/heads/main",
            "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
            "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
            "environment": "{}",
            "state": "open",
            "commit_sha": "39406e42cb832f683daa691dd652a8dc36ee8930",
            "message": {"text": "This path depends on a user-provided value."},
            "location": {
                "path": "spec-main/api-session-spec.ts",
                "start_line": 917,
                "end_line": 917,
                "start_column": 7,
                "end_column": 18,
            },
            "classifications": ["test"],
        },
        "instances_url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/4/instances",
        "repository": {
            "id": 1296269,
            "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
            "name": "Hello-World",
            "full_name": "octocat/Hello-World",
            "owner": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "private": False,
            "html_url": "https://github.com/octocat/Hello-World",
            "description": "This your first repo!",
            "fork": False,
            "url": "https://api.github.com/repos/octocat/Hello-World",
            "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
            "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
            "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
            "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
            "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
            "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
            "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
            "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
            "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
            "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
            "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
            "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
            "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
            "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
            "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
            "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
            "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
            "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
            "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
        },
    },
    {
        "number": 3,
        "created_at": "2020-02-13T12:29:18Z",
        "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/3",
        "html_url": "https://github.com/octocat/hello-world/code-scanning/3",
        "state": "dismissed",
        "dismissed_by": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "dismissed_at": "2020-02-14T12:29:18Z",
        "dismissed_reason": "false positive",
        "dismissed_comment": "This alert is not actually correct, because there's a sanitizer included in the library.",
        "rule": {
            "id": "js/zipslip",
            "severity": "error",
            "tags": ["security", "external/cwe/cwe-022"],
            "description": "Arbitrary file write during zip extraction",
            "name": "js/zipslip",
        },
        "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
        "most_recent_instance": {
            "ref": "refs/heads/main",
            "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
            "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
            "environment": "{}",
            "state": "open",
            "commit_sha": "39406e42cb832f683daa691dd652a8dc36ee8930",
            "message": {"text": "This path depends on a user-provided value."},
            "location": {
                "path": "lib/ab12-gen.js",
                "start_line": 917,
                "end_line": 917,
                "start_column": 7,
                "end_column": 18,
            },
            "classifications": [],
        },
        "instances_url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/3/instances",
        "repository": {
            "id": 1296269,
            "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
            "name": "Hello-World",
            "full_name": "octocat/Hello-World",
            "owner": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "private": False,
            "html_url": "https://github.com/octocat/Hello-World",
            "description": "This your first repo!",
            "fork": False,
            "url": "https://api.github.com/repos/octocat/Hello-World",
            "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
            "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
            "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
            "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
            "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
            "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
            "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
            "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
            "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
            "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
            "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
            "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
            "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
            "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
            "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
            "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
            "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
            "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
            "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
        },
    },
]

# Sample payload mirroring GitHub's "list code scanning analyses" REST
# response: one CodeQL analysis (id 201) and one from a third-party tool
# (id 200). Used as the mocked JSON body in the analyses listing tests.
ANALYSES = [
    {
        "ref": "refs/heads/main",
        "commit_sha": "d99612c3e1f2970085cfbaeadf8f010ef69bad83",
        "analysis_key": ".github/workflows/codeql-analysis.yml:analyze",
        "environment": '{"language":"python"}',
        "error": "",
        "category": ".github/workflows/codeql-analysis.yml:analyze/language:python",
        "created_at": "2020-08-27T15:05:21Z",
        "results_count": 17,
        "rules_count": 49,
        "id": 201,
        "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/201",
        "sarif_id": "6c81cd8e-b078-4ac3-a3be-1dad7dbd0b53",
        "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
        "deletable": True,
        "warning": "",
    },
    {
        "ref": "refs/heads/my-branch",
        "commit_sha": "c8cff6510d4d084fb1b4aa13b64b97ca12b07321",
        "analysis_key": ".github/workflows/shiftleft.yml:build",
        "environment": "{}",
        "error": "",
        "category": ".github/workflows/shiftleft.yml:build/",
        "created_at": "2020-08-31T22:46:44Z",
        "results_count": 17,
        "rules_count": 32,
        "id": 200,
        "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/200",
        "sarif_id": "8981cd8e-b078-4ac3-a3be-1dad7dbd0b582",
        "tool": {
            "name": "Python Security Analysis",
            "guid": None,
            "version": "1.2.0",
        },
        "deletable": True,
        "warning": "",
    },
]


class GitHubAsyncRESTCodeScanningTestCase(GitHubAsyncRESTTestCase):
    api_cls = GitHubAsyncRESTCodeScanning

    async def test_organization_alerts(self):
        """Default listing yields both sample alerts with default params."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.organization_alerts("foo")
        ]
        # Exactly the two fixture alerts, in response order, then exhaustion.
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/code-scanning/alerts",
            params={"per_page": "100", "sort": "created", "direction": "desc"},
        )

    async def test_organization_alerts_state(self):
        """A state filter is forwarded as its string value."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.organization_alerts(
                "foo", state=AlertState.FIXED
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "state": "fixed",
            },
        )

    async def test_organization_alerts_tool_name(self):
        """A tool_name filter is forwarded verbatim."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.organization_alerts(
                "foo", tool_name="CodeQL"
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "tool_name": "CodeQL",
            },
        )

    async def test_organization_alerts_tool_guid(self):
        """An explicit tool_guid of None is still sent as a parameter."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.organization_alerts(
                "foo", tool_guid=None
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "tool_guid": None,
            },
        )

    async def test_organization_alerts_severity(self):
        """A severity filter is forwarded as its string value."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.organization_alerts(
                "foo", severity=Severity.ERROR
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "severity": "error",
            },
        )

    async def test_organization_alerts_sort(self):
        """A sort choice replaces the default 'created' sort parameter."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.organization_alerts(
                "foo", sort=AlertSort.UPDATED
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "updated",
                "direction": "desc",
            },
        )

    async def test_organization_alerts_direction(self):
        """A direction choice replaces the default 'desc' direction."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.organization_alerts(
                "foo", direction=SortOrder.ASC
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "asc",
            },
        )

    async def test_alerts(self):
        """Repository listing yields both sample alerts with default params."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number async for alert in self.api.alerts("foo/bar")
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts",
            params={"per_page": "100", "sort": "created", "direction": "desc"},
        )

    async def test_alerts_state(self):
        """A state filter is forwarded as its string value."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.alerts(
                "foo/bar", state=AlertState.FIXED
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "state": "fixed",
            },
        )

    async def test_alerts_tool_name(self):
        """A tool_name filter is forwarded verbatim."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.alerts("foo/bar", tool_name="CodeQL")
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "tool_name": "CodeQL",
            },
        )

    async def test_alerts_tool_guid(self):
        """An explicit tool_guid of None is still sent as a parameter."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.alerts("foo/bar", tool_guid=None)
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "tool_guid": None,
            },
        )

    async def test_alerts_severity(self):
        """A severity filter is forwarded as its string value."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.alerts(
                "foo/bar", severity=Severity.ERROR
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "severity": "error",
            },
        )

    async def test_alerts_sort(self):
        """A sort choice replaces the default 'created' sort parameter."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.alerts(
                "foo/bar", sort=AlertSort.UPDATED
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "updated",
                "direction": "desc",
            },
        )

    async def test_alerts_direction(self):
        """A direction choice replaces the default 'desc' direction."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

        numbers = [
            alert.number
            async for alert in self.api.alerts(
                "foo/bar", direction=SortOrder.ASC
            )
        ]
        self.assertEqual(numbers, [4, 3])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "asc",
            },
        )

    async def test_alert(self):
        """A single alert can be fetched by repository and alert number."""
        # Full dismissed-alert payload as documented for the GitHub
        # "get a code scanning alert" endpoint (CodeQL "Zip Slip" example).
        response = create_response()
        response.json.return_value = {
            "number": 42,
            "created_at": "2020-06-19T11:21:34Z",
            "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/42",
            "html_url": "https://github.com/octocat/hello-world/code-scanning/42",
            "state": "dismissed",
            "fixed_at": None,
            "dismissed_by": {
                "login": "octocat",
                "id": 54933897,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "dismissed_at": "2020-02-14T12:29:18Z",
            "dismissed_reason": "false positive",
            "dismissed_comment": "This alert is not actually correct, because there's a sanitizer included in the library.",
            "rule": {
                "id": "js/zipslip",
                "severity": "error",
                "security_severity_level": "high",
                "description": 'Arbitrary file write during zip extraction ("Zip Slip")',
                "name": "js/zipslip",
                "full_description": "Extracting files from a malicious zip archive without validating that the destination file path is within the destination directory can cause files outside the destination directory to be overwritten.",
                "tags": ["security", "external/cwe/cwe-022"],
                "help": '# Arbitrary file write during zip extraction ("Zip Slip")\\nExtracting files from a malicious zip archive without validating that the destination file path is within the destination directory can cause files outside the destination directory to be overwritten ...',
                "help_uri": "https://codeql.github.com/",
            },
            "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
            "most_recent_instance": {
                "ref": "refs/heads/main",
                "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "environment": "{}",
                "state": "dismissed",
                "commit_sha": "39406e42cb832f683daa691dd652a8dc36ee8930",
                "message": {
                    "text": "This path depends on a user-provided value."
                },
                "location": {
                    "path": "spec-main/api-session-spec.ts",
                    "start_line": 917,
                    "end_line": 917,
                    "start_column": 7,
                    "end_column": 18,
                },
                "classifications": ["test"],
            },
            "instances_url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/42/instances",
        }
        self.client.get.return_value = response

        alert = await self.api.alert(
            "foo/bar",
            42,
        )

        # The alert number must end up in the request path.
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/alerts/42",
        )

        self.assertEqual(alert.number, 42)

    async def test_update(self):
        """Dismissing an alert PATCHes state, reason and comment."""
        # Response payload mirroring the GitHub "update a code scanning
        # alert" endpoint for a dismissed alert.
        response = create_response()
        response.json.return_value = {
            "number": 42,
            "created_at": "2020-08-25T21:28:36Z",
            "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/42",
            "html_url": "https://github.com/octocat/hello-world/code-scanning/42",
            "state": "dismissed",
            "fixed_at": None,
            "dismissed_by": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "dismissed_at": "2020-09-02T22:34:56Z",
            "dismissed_reason": "false positive",
            "dismissed_comment": "This alert is not actually correct, because there's a sanitizer included in the library.",
            "rule": {
                "id": "js/zipslip",
                "severity": "error",
                "security_severity_level": "high",
                "description": 'Arbitrary file write during zip extraction ("Zip Slip")',
                "name": "js/zipslip",
                "full_description": "Extracting files from a malicious zip archive without validating that the destination file path is within the destination directory can cause files outside the destination directory to be overwritten.",
                "tags": ["security", "external/cwe/cwe-022"],
                "help": '# Arbitrary file write during zip extraction ("Zip Slip")\\nExtracting files from a malicious zip archive without validating that the destination file path is within the destination directory can cause files outside the destination directory to be overwritten ...',
                "help_uri": "https://codeql.github.com/",
            },
            "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
            "most_recent_instance": {
                "ref": "refs/heads/main",
                "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "environment": "{}",
                "state": "dismissed",
                "commit_sha": "39406e42cb832f683daa691dd652a8dc36ee8930",
                "message": {
                    "text": "This path depends on a user-provided value."
                },
                "location": {
                    "path": "spec-main/api-session-spec.ts",
                    "start_line": 917,
                    "end_line": 917,
                    "start_column": 7,
                    "end_column": 18,
                },
                "classifications": ["test"],
            },
            "instances_url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/42/instances",
        }
        self.client.patch.return_value = response

        alert = await self.api.update_alert(
            "foo/bar",
            42,
            AlertState.DISMISSED,
            dismissed_reason=DismissedReason.USED_IN_TESTS,
            dismissed_comment="Only used in tests",
        )

        # Enum values must be serialized to their wire strings in the body.
        self.client.patch.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/alerts/42",
            data={
                "state": "dismissed",
                "dismissed_reason": "used in tests",
                "dismissed_comment": "Only used in tests",
            },
        )

        self.assertEqual(alert.number, 42)
        # The repository field is only present in org-level listings.
        self.assertIsNone(alert.repository)

    async def test_alerts_instances(self):
        """Instances of an alert are iterated from the paginated listing."""
        # Two instance payloads: one open on the default branch, one
        # already fixed on a pull request merge ref.
        response = create_response()
        response.json.return_value = [
            {
                "ref": "refs/heads/main",
                "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "environment": "",
                "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "state": "open",
                "fixed_at": None,
                "commit_sha": "39406e42cb832f683daa691dd652a8dc36ee8930",
                "message": {
                    "text": "This path depends on a user-provided value."
                },
                "location": {
                    "path": "lib/ab12-gen.js",
                    "start_line": 917,
                    "end_line": 917,
                    "start_column": 7,
                    "end_column": 18,
                },
                "classifications": ["library"],
            },
            {
                "ref": "refs/pull/3740/merge",
                "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "environment": "",
                "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
                "state": "fixed",
                "fixed_at": "2020-02-14T12:29:18Z",
                "commit_sha": "b09da05606e27f463a2b49287684b4ae777092f2",
                "message": {
                    "text": "This suffix check is missing a length comparison to correctly handle lastIndexOf returning -1."
                },
                "location": {
                    "path": "app/script.js",
                    "start_line": 2,
                    "end_line": 2,
                    "start_column": 10,
                    "end_column": 50,
                },
                "classifications": ["source"],
            },
        ]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.instances("foo/bar", 1))
        instance = await anext(async_it)
        self.assertEqual(instance.ref, "refs/heads/main")
        instance = await anext(async_it)
        self.assertEqual(instance.ref, "refs/pull/3740/merge")

        # The iterator must be exhausted after the single mocked page.
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/alerts/1/instances",
            params={
                "per_page": "100",
            },
        )

    async def test_analyses(self):
        """Analyses are listed with descending order by default."""
        response = create_response()
        response.json.return_value = ANALYSES
        self.client.get_all.return_value = AsyncIteratorMock([response])

        iterator = aiter(self.api.analyses("foo/bar"))

        first = await anext(iterator)
        second = await anext(iterator)

        self.assertEqual(first.ref, "refs/heads/main")
        self.assertEqual(second.ref, "refs/heads/my-branch")

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/analyses",
            params={
                "per_page": "100",
                "direction": "desc",
            },
        )

    async def test_analyses_tool_name(self):
        """A tool name filter is passed through as ``tool_name``."""
        response = create_response()
        response.json.return_value = ANALYSES
        self.client.get_all.return_value = AsyncIteratorMock([response])

        iterator = aiter(self.api.analyses("foo/bar", tool_name="CodeQL"))

        first = await anext(iterator)
        second = await anext(iterator)

        self.assertEqual(first.ref, "refs/heads/main")
        self.assertEqual(second.ref, "refs/heads/my-branch")

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/analyses",
            params={
                "per_page": "100",
                "direction": "desc",
                "tool_name": "CodeQL",
            },
        )

    async def test_analyses_tool_guid(self):
        """A tool GUID filter is passed through as ``tool_guid``."""
        response = create_response()
        response.json.return_value = ANALYSES
        self.client.get_all.return_value = AsyncIteratorMock([response])

        iterator = aiter(self.api.analyses("foo/bar", tool_guid="123"))

        first = await anext(iterator)
        second = await anext(iterator)

        self.assertEqual(first.ref, "refs/heads/main")
        self.assertEqual(second.ref, "refs/heads/my-branch")

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/analyses",
            params={
                "per_page": "100",
                "direction": "desc",
                "tool_guid": "123",
            },
        )

    async def test_analyses_sarif_id(self):
        """A SARIF upload id filter is passed through as ``sarif_id``."""
        response = create_response()
        response.json.return_value = ANALYSES
        self.client.get_all.return_value = AsyncIteratorMock([response])

        iterator = aiter(self.api.analyses("foo/bar", sarif_id="123"))

        first = await anext(iterator)
        second = await anext(iterator)

        self.assertEqual(first.ref, "refs/heads/main")
        self.assertEqual(second.ref, "refs/heads/my-branch")

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/analyses",
            params={
                "per_page": "100",
                "direction": "desc",
                "sarif_id": "123",
            },
        )

    async def test_analyses_ref(self):
        """A git ref filter is passed through as ``ref``."""
        response = create_response()
        response.json.return_value = ANALYSES
        self.client.get_all.return_value = AsyncIteratorMock([response])

        iterator = aiter(self.api.analyses("foo/bar", ref="refs/heads/main"))

        first = await anext(iterator)
        second = await anext(iterator)

        self.assertEqual(first.ref, "refs/heads/main")
        self.assertEqual(second.ref, "refs/heads/my-branch")

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/analyses",
            params={
                "per_page": "100",
                "direction": "desc",
                "ref": "refs/heads/main",
            },
        )

    async def test_analyses_direction(self):
        """An explicit sort order replaces the default descending order."""
        response = create_response()
        response.json.return_value = ANALYSES
        self.client.get_all.return_value = AsyncIteratorMock([response])

        iterator = aiter(self.api.analyses("foo/bar", direction=SortOrder.ASC))

        first = await anext(iterator)
        second = await anext(iterator)

        self.assertEqual(first.ref, "refs/heads/main")
        self.assertEqual(second.ref, "refs/heads/my-branch")

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/analyses",
            params={
                "per_page": "100",
                "direction": "asc",
            },
        )

    async def test_analysis(self):
        """A single analysis can be requested by its numeric id."""
        response = create_response()
        response.json.return_value = {
            "ref": "refs/heads/main",
            "commit_sha": "c18c69115654ff0166991962832dc2bd7756e655",
            "analysis_key": ".github/workflows/codeql-analysis.yml:analyze",
            "environment": '{"language":"javascript"}',
            "error": "",
            "category": ".github/workflows/codeql-analysis.yml:analyze/language:javascript",
            "created_at": "2021-01-13T11:55:49Z",
            "results_count": 3,
            "rules_count": 67,
            "id": 3602840,
            "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/201",
            "sarif_id": "47177e22-5596-11eb-80a1-c1e54ef945c6",
            "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
            "deletable": True,
            "warning": "",
        }
        self.client.get.return_value = response

        analysis = await self.api.analysis("foo/bar", 42)

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/analyses/42",
        )

        self.assertEqual(analysis.ref, "refs/heads/main")

    async def test_delete_analysis(self):
        """Deleting an analysis returns the follow-up deletion URLs."""
        next_url = "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/41"
        confirm_url = "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/41?confirm_delete"

        response = create_response()
        response.json.return_value = {
            "next_analysis_url": next_url,
            "confirm_delete_url": confirm_url,
        }
        self.client.delete.return_value = response

        result = await self.api.delete_analysis("foo/bar", 42)

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/analyses/42",
        )

        self.assertEqual(result["next_analysis_url"], next_url)
        self.assertEqual(result["confirm_delete_url"], confirm_url)

    async def test_codeql_databases(self):
        """CodeQL databases of a repository are iterated from the listing."""
        # Two database payloads (java and ruby) as returned by the
        # "list CodeQL databases" endpoint.
        response = create_response()
        response.json.return_value = [
            {
                "id": 1,
                "name": "database.zip",
                "language": "java",
                "uploader": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "content_type": "application/zip",
                "size": 1024,
                "created_at": "2022-09-12T12:14:32Z",
                "updated_at": "2022-09-12T12:14:32Z",
                "url": "https://api.github.com/repos/octocat/Hello-World/code-scanning/codeql/databases/java",
                # commit_oid is an integer here as in the upstream example
                # payload; real responses use a hex string — TODO confirm.
                "commit_oid": 12345678901234567000,
            },
            {
                "id": 2,
                "name": "database.zip",
                "language": "ruby",
                "uploader": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "content_type": "application/zip",
                "size": 1024,
                "created_at": "2022-09-12T12:14:32Z",
                "updated_at": "2022-09-12T12:14:32Z",
                "url": "https://api.github.com/repos/octocat/Hello-World/code-scanning/codeql/databases/ruby",
                "commit_oid": 23456789012345680000,
            },
        ]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.codeql_databases("foo/bar"))
        db = await anext(async_it)
        self.assertEqual(db.id, 1)
        alert = await anext(async_it)
        self.assertEqual(alert.id, 2)

        # No further pages: iteration must stop after the two databases.
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/code-scanning/codeql/databases",
            params={
                "per_page": "100",
            },
        )

    async def test_codeql_database(self):
        """A single CodeQL database is addressed by its language name."""
        response = create_response()
        response.json.return_value = {
            "id": 1,
            "name": "database.zip",
            "language": "java",
            "uploader": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "content_type": "application/zip",
            "size": 1024,
            "created_at": "2022-09-12T12:14:32Z",
            "updated_at": "2022-09-12T12:14:32Z",
            "url": "https://api.github.com/repos/octocat/Hello-World/code-scanning/codeql/databases/java",
            "commit_oid": 12345678901234567000,
        }
        self.client.get.return_value = response

        alert = await self.api.codeql_database(
            "foo/bar",
            "java",
        )

        # The language name becomes the last path segment of the request.
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/codeql/databases/java",
        )

        self.assertEqual(alert.id, 1)

    async def test_default_setup(self):
        """The default-setup configuration is fetched and deserialized."""
        response = create_response()
        response.json.return_value = {
            "state": "configured",
            "languages": ["ruby", "python"],
            "query_suite": "default",
            "updated_at": "2023-01-19T11:21:34Z",
            "schedule": "weekly",
        }
        self.client.get.return_value = response

        setup = await self.api.default_setup("foo/bar")

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/default-setup",
        )

        # The wire string "configured" must map onto the enum member.
        self.assertEqual(setup.state, DefaultSetupState.CONFIGURED)

    async def test_update_default_setup(self):
        """Updating the default setup PATCHes state, suite and languages."""
        response = create_response()
        response.json.return_value = {
            "run_id": 42,
            "run_url": "https://api.github.com/repos/octoorg/octocat/actions/runs/42",
        }
        self.client.patch.return_value = response

        resp = await self.api.update_default_setup(
            "foo/bar",
            state=DefaultSetupState.CONFIGURED,
            query_suite=QuerySuite.EXTENDED,
            languages=[Language.GO],
        )

        # NOTE(review): the asserted path repeats "code-scanning" twice
        # ("/code-scanning/code-scanning/default-setup"). This mirrors the
        # current implementation, but the documented GitHub endpoint is
        # "/repos/{owner}/{repo}/code-scanning/default-setup" — verify
        # against GitHubAsyncRESTCodeScanning.update_default_setup.
        self.client.patch.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/code-scanning/default-setup",
            data={
                "state": "configured",
                "query_suite": "extended",
                "languages": ["go"],
            },
        )

        self.assertEqual(
            resp["run_id"],
            42,
        )
        self.assertEqual(
            resp["run_url"],
            "https://api.github.com/repos/octoorg/octocat/actions/runs/42",
        )

    async def test_upload_sarif_data(self):
        """SARIF data is POSTed with commit sha, ref and the (gzipped) report."""
        # Minimal but valid SARIF 2.1.0 document with a single ESLint result.
        sarif = {
            "version": "2.1.0",
            "$schema": "http://json.schemastore.org/sarif-2.1.0-rtm.4",
            "runs": [
                {
                    "tool": {
                        "driver": {
                            "name": "ESLint",
                            "informationUri": "https://eslint.org",
                            "rules": [
                                {
                                    "id": "no-unused-vars",
                                    "shortDescription": {
                                        "text": "disallow unused variables"
                                    },
                                    "helpUri": "https://eslint.org/docs/rules/no-unused-vars",
                                    "properties": {"category": "Variables"},
                                }
                            ],
                        }
                    },
                    "artifacts": [
                        {
                            "location": {
                                "uri": "file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js"
                            }
                        }
                    ],
                    "results": [
                        {
                            "level": "error",
                            "message": {
                                "text": "'x' is assigned a value but never used."
                            },
                            "locations": [
                                {
                                    "physicalLocation": {
                                        "artifactLocation": {
                                            "uri": "file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js",
                                            "index": 0,
                                        },
                                        "region": {
                                            "startLine": 1,
                                            "startColumn": 5,
                                        },
                                    }
                                }
                            ],
                            "ruleId": "no-unused-vars",
                            "ruleIndex": 0,
                        }
                    ],
                }
            ],
        }

        response = create_response()
        response.json.return_value = {
            "id": "47177e22-5596-11eb-80a1-c1e54ef945c6",
            "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/sarifs/47177e22-5596-11eb-80a1-c1e54ef945c6",
        }
        self.client.post.return_value = response

        resp = await self.api.upload_sarif_data(
            "foo/bar",
            commit_sha="4b6472266afd7b471e86085a6659e8c7f2b119da",
            ref="refs/heads/master",
            sarif=json.dumps(sarif).encode(),
        )

        # Inspect the awaited call manually instead of
        # assert_awaited_once_with because the sarif field is not
        # byte-for-byte reproducible (see comment below).
        self.assertEqual(self.client.post.await_count, 1)
        args = self.client.post.await_args
        self.assertEqual(args.args, ("/repos/foo/bar/code-scanning/sarifs",))
        data = args.kwargs["data"]
        self.assertEqual(
            data["commit_sha"],
            "4b6472266afd7b471e86085a6659e8c7f2b119da",
        )
        self.assertEqual(
            data["ref"],
            "refs/heads/master",
        )
        # it's not possible to check the sarif data in Python < 3.11 because
        # gzip creates different content on each run
        self.assertTrue("sarif" in data)

        self.assertEqual(resp["id"], "47177e22-5596-11eb-80a1-c1e54ef945c6")
        self.assertEqual(
            resp["url"],
            "https://api.github.com/repos/octocat/hello-world/code-scanning/sarifs/47177e22-5596-11eb-80a1-c1e54ef945c6",
        )

    async def test_sarif(self):
        """The processing status of a SARIF upload can be queried by id."""
        analyses_url = "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses?sarif_id=47177e22-5596-11eb-80a1-c1e54ef945c6"

        response = create_response()
        response.json.return_value = {
            "processing_status": "complete",
            "analyses_url": analyses_url,
        }
        self.client.get.return_value = response

        result = await self.api.sarif(
            "foo/bar", "47177e22-5596-11eb-80a1-c1e54ef945c6"
        )

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/code-scanning/sarifs/47177e22-5596-11eb-80a1-c1e54ef945c6",
        )

        self.assertEqual(result.processing_status, SarifProcessingStatus.COMPLETE)
        self.assertEqual(result.analyses_url, analyses_url)
        # No errors are reported for a completed upload.
        self.assertIsNone(result.errors)
pontos-25.3.2/tests/github/api/test_contents.py000066400000000000000000000026741476255566300216240ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

from pathlib import Path

from pontos.github.api.contents import GitHubAsyncRESTContent
from tests.github.api import GitHubAsyncRESTTestCase, create_response

here = Path(__file__).parent


class GitHubAsyncRESTContentTestCase(GitHubAsyncRESTTestCase):
    """Tests for the repository contents API wrapper."""

    api_cls = GitHubAsyncRESTContent

    async def test_path_exists(self):
        """A successful response means the path exists."""
        self.client.get.return_value = create_response(is_success=True)

        exists = await self.api.path_exists("foo/bar", "a/file.txt")

        self.assertTrue(exists)
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/contents/a/file.txt", params={}
        )

    async def test_path_exists_with_branch(self):
        """An explicit branch is forwarded as the ``ref`` query parameter."""
        self.client.get.return_value = create_response(is_success=True)

        exists = await self.api.path_exists(
            "foo/bar", "a/file.txt", branch="baz"
        )

        self.assertTrue(exists)
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/contents/a/file.txt", params={"ref": "baz"}
        )

    async def test_path_not_exists(self):
        """An unsuccessful response means the path does not exist."""
        self.client.get.return_value = create_response(is_success=False)

        exists = await self.api.path_exists("foo/bar", "a/file.txt")

        self.assertFalse(exists)
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/contents/a/file.txt", params={}
        )
pontos-25.3.2/tests/github/api/test_dependabot.py000066400000000000000000001267501476255566300220760ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

from pontos.github.api.dependabot import GitHubAsyncRESTDependabot
from pontos.github.models.base import SortOrder
from pontos.github.models.dependabot import (
    AlertSort,
    AlertState,
    DependencyScope,
    DismissedReason,
    Severity,
)
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

# Canned Dependabot alert payload mirroring the GitHub REST API response
# shape (two alerts: number 2 is dismissed, number 1 is open). Every test
# below feeds this list through a mocked paginated response and asserts
# the alert numbers come back in this order.
ALERTS = [
    # Alert 2: dismissed "django" alert (CVE-2018-6188), including the
    # dismissing user and the owning repository payloads.
    {
        "number": 2,
        "state": "dismissed",
        "dependency": {
            "package": {"ecosystem": "pip", "name": "django"},
            "manifest_path": "path/to/requirements.txt",
            "scope": "runtime",
        },
        "security_advisory": {
            "ghsa_id": "GHSA-rf4j-j272-fj86",
            "cve_id": "CVE-2018-6188",
            "summary": "Django allows remote attackers to obtain potentially sensitive information by leveraging data exposure from the confirm_login_allowed() method, as demonstrated by discovering whether a user account is inactive",
            "description": "django.contrib.auth.forms.AuthenticationForm in Django 2.0 before 2.0.2, and 1.11.8 and 1.11.9, allows remote attackers to obtain potentially sensitive information by leveraging data exposure from the confirm_login_allowed() method, as demonstrated by discovering whether a user account is inactive.",
            "vulnerabilities": [
                {
                    "package": {"ecosystem": "pip", "name": "django"},
                    "severity": "high",
                    "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
                    "first_patched_version": {"identifier": "2.0.2"},
                },
                {
                    "package": {"ecosystem": "pip", "name": "django"},
                    "severity": "high",
                    "vulnerable_version_range": ">= 1.11.8, < 1.11.10",
                    "first_patched_version": {"identifier": "1.11.10"},
                },
            ],
            "severity": "high",
            "cvss": {
                "vector_string": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
                "score": 7.5,
            },
            "cwes": [
                {
                    "cwe_id": "CWE-200",
                    "name": "Exposure of Sensitive Information to an Unauthorized Actor",
                }
            ],
            "identifiers": [
                {"type": "GHSA", "value": "GHSA-rf4j-j272-fj86"},
                {"type": "CVE", "value": "CVE-2018-6188"},
            ],
            "references": [
                {"url": "https://nvd.nist.gov/vuln/detail/CVE-2018-6188"},
                {"url": "https://github.com/advisories/GHSA-rf4j-j272-fj86"},
                {"url": "https://usn.ubuntu.com/3559-1/"},
                {
                    "url": "https://www.djangoproject.com/weblog/2018/feb/01/security-releases/"
                },
                {"url": "http://www.securitytracker.com/id/1040422"},
            ],
            "published_at": "2018-10-03T21:13:54Z",
            "updated_at": "2022-04-26T18:35:37Z",
            "withdrawn_at": None,
        },
        "security_vulnerability": {
            "package": {"ecosystem": "pip", "name": "django"},
            "severity": "high",
            "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
            "first_patched_version": {"identifier": "2.0.2"},
        },
        "url": "https://api.github.com/repos/octo-org/octo-repo/dependabot/alerts/2",
        "html_url": "https://github.com/octo-org/octo-repo/security/dependabot/2",
        "created_at": "2022-06-15T07:43:03Z",
        "updated_at": "2022-08-23T14:29:47Z",
        "dismissed_at": "2022-08-23T14:29:47Z",
        "dismissed_by": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "dismissed_reason": "tolerable_risk",
        "dismissed_comment": "This alert is accurate but we use a sanitizer.",
        "fixed_at": None,
        "repository": {
            "id": 217723378,
            "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
            "name": "octo-repo",
            "full_name": "octo-org/octo-repo",
            "owner": {
                "login": "octo-org",
                "id": 6811672,
                "node_id": "MDEyOk9yZ2FuaXphdGlvbjY4MTE2NzI=",
                "avatar_url": "https://avatars3.githubusercontent.com/u/6811672?v=4",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octo-org",
                "html_url": "https://github.com/octo-org",
                "followers_url": "https://api.github.com/users/octo-org/followers",
                "following_url": "https://api.github.com/users/octo-org/following{/other_user}",
                "gists_url": "https://api.github.com/users/octo-org/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octo-org/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octo-org/subscriptions",
                "organizations_url": "https://api.github.com/users/octo-org/orgs",
                "repos_url": "https://api.github.com/users/octo-org/repos",
                "events_url": "https://api.github.com/users/octo-org/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octo-org/received_events",
                "type": "Organization",
                "site_admin": False,
            },
            "private": True,
            "html_url": "https://github.com/octo-org/octo-repo",
            "description": None,
            "fork": False,
            "url": "https://api.github.com/repos/octo-org/octo-repo",
            "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
            "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
            "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
            "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
            "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
            "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
            "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
            "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
            "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
            "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
            "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
            "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
            "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
            "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
            "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
            "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
            "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
            "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
            "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
        },
    },
    # Alert 1: open "ansible" alert (CVE-2021-20191); dismissal fields are
    # None because the alert has not been dismissed.
    {
        "number": 1,
        "state": "open",
        "dependency": {
            "package": {"ecosystem": "pip", "name": "ansible"},
            "manifest_path": "path/to/requirements.txt",
            "scope": "runtime",
        },
        "security_advisory": {
            "ghsa_id": "GHSA-8f4m-hccc-8qph",
            "cve_id": "CVE-2021-20191",
            "summary": "Insertion of Sensitive Information into Log File in ansible",
            "description": "A flaw was found in ansible. Credentials, such as secrets, are being disclosed in console log by default and not protected by no_log feature when using those modules. An attacker can take advantage of this information to steal those credentials. The highest threat from this vulnerability is to data confidentiality.",
            "vulnerabilities": [
                {
                    "package": {"ecosystem": "pip", "name": "ansible"},
                    "severity": "medium",
                    "vulnerable_version_range": ">= 2.9.0, < 2.9.18",
                    "first_patched_version": {"identifier": "2.9.18"},
                },
                {
                    "package": {"ecosystem": "pip", "name": "ansible"},
                    "severity": "medium",
                    "vulnerable_version_range": "< 2.8.19",
                    "first_patched_version": {"identifier": "2.8.19"},
                },
                {
                    "package": {"ecosystem": "pip", "name": "ansible"},
                    "severity": "medium",
                    "vulnerable_version_range": ">= 2.10.0, < 2.10.7",
                    "first_patched_version": {"identifier": "2.10.7"},
                },
            ],
            "severity": "medium",
            "cvss": {
                "vector_string": "CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:N/A:N",
                "score": 5.5,
            },
            "cwes": [
                {
                    "cwe_id": "CWE-532",
                    "name": "Insertion of Sensitive Information into Log File",
                }
            ],
            "identifiers": [
                {"type": "GHSA", "value": "GHSA-8f4m-hccc-8qph"},
                {"type": "CVE", "value": "CVE-2021-20191"},
            ],
            "references": [
                {"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-20191"},
                {
                    "url": "https://access.redhat.com/security/cve/cve-2021-20191"
                },
                {"url": "https://bugzilla.redhat.com/show_bug.cgi?id=1916813"},
            ],
            "published_at": "2021-06-01T17:38:00Z",
            "updated_at": "2021-08-12T23:06:00Z",
            "withdrawn_at": None,
        },
        "security_vulnerability": {
            "package": {"ecosystem": "pip", "name": "ansible"},
            "severity": "medium",
            "vulnerable_version_range": "< 2.8.19",
            "first_patched_version": {"identifier": "2.8.19"},
        },
        "url": "https://api.github.com/repos/octo-org/hello-world/dependabot/alerts/1",
        "html_url": "https://github.com/octo-org/hello-world/security/dependabot/1",
        "created_at": "2022-06-14T15:21:52Z",
        "updated_at": "2022-06-14T15:21:52Z",
        "dismissed_at": None,
        "dismissed_by": None,
        "dismissed_reason": None,
        "dismissed_comment": None,
        "fixed_at": None,
        "repository": {
            "id": 664700648,
            "node_id": "MDEwOlJlcG9zaXRvcnk2NjQ3MDA2NDg=",
            "name": "hello-world",
            "full_name": "octo-org/hello-world",
            "owner": {
                "login": "octo-org",
                "id": 6811672,
                "node_id": "MDEyOk9yZ2FuaXphdGlvbjY4MTE2NzI=",
                "avatar_url": "https://avatars3.githubusercontent.com/u/6811672?v=4",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octo-org",
                "html_url": "https://github.com/octo-org",
                "followers_url": "https://api.github.com/users/octo-org/followers",
                "following_url": "https://api.github.com/users/octo-org/following{/other_user}",
                "gists_url": "https://api.github.com/users/octo-org/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octo-org/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octo-org/subscriptions",
                "organizations_url": "https://api.github.com/users/octo-org/orgs",
                "repos_url": "https://api.github.com/users/octo-org/repos",
                "events_url": "https://api.github.com/users/octo-org/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octo-org/received_events",
                "type": "Organization",
                "site_admin": False,
            },
            "private": True,
            "html_url": "https://github.com/octo-org/hello-world",
            "description": None,
            "fork": False,
            "url": "https://api.github.com/repos/octo-org/hello-world",
            "archive_url": "https://api.github.com/repos/octo-org/hello-world/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octo-org/hello-world/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octo-org/hello-world/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octo-org/hello-world/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octo-org/hello-world/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octo-org/hello-world/comments{/number}",
            "commits_url": "https://api.github.com/repos/octo-org/hello-world/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octo-org/hello-world/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octo-org/hello-world/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octo-org/hello-world/contributors",
            "deployments_url": "https://api.github.com/repos/octo-org/hello-world/deployments",
            "downloads_url": "https://api.github.com/repos/octo-org/hello-world/downloads",
            "events_url": "https://api.github.com/repos/octo-org/hello-world/events",
            "forks_url": "https://api.github.com/repos/octo-org/hello-world/forks",
            "git_commits_url": "https://api.github.com/repos/octo-org/hello-world/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octo-org/hello-world/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octo-org/hello-world/git/tags{/sha}",
            "hooks_url": "https://api.github.com/repos/octo-org/hello-world/hooks",
            "issue_comment_url": "https://api.github.com/repos/octo-org/hello-world/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octo-org/hello-world/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octo-org/hello-world/issues{/number}",
            "keys_url": "https://api.github.com/repos/octo-org/hello-world/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octo-org/hello-world/labels{/name}",
            "languages_url": "https://api.github.com/repos/octo-org/hello-world/languages",
            "merges_url": "https://api.github.com/repos/octo-org/hello-world/merges",
            "milestones_url": "https://api.github.com/repos/octo-org/hello-world/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octo-org/hello-world/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octo-org/hello-world/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octo-org/hello-world/releases{/id}",
            "stargazers_url": "https://api.github.com/repos/octo-org/hello-world/stargazers",
            "statuses_url": "https://api.github.com/repos/octo-org/hello-world/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octo-org/hello-world/subscribers",
            "subscription_url": "https://api.github.com/repos/octo-org/hello-world/subscription",
            "tags_url": "https://api.github.com/repos/octo-org/hello-world/tags",
            "teams_url": "https://api.github.com/repos/octo-org/hello-world/teams",
            "trees_url": "https://api.github.com/repos/octo-org/hello-world/git/trees{/sha}",
        },
    },
]


class GitHubAsyncRESTDependabotTestCase(GitHubAsyncRESTTestCase):
    api_cls = GitHubAsyncRESTDependabot

    async def test_enterprise_alerts(self):
        """Default enterprise listing uses created/desc ordering."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(self.api.enterprise_alerts("foo"))
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/enterprises/foo/dependabot/alerts",
            params={"per_page": "100", "sort": "created", "direction": "desc"},
        )

    async def test_enterprise_alerts_state(self):
        """The state filter is forwarded as a "state" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.enterprise_alerts("foo", state=AlertState.FIXED)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/enterprises/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "state": "fixed",
            },
        )

    async def test_enterprise_alerts_severity(self):
        """The severity filter is forwarded as a "severity" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.enterprise_alerts("foo", severity=Severity.CRITICAL)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/enterprises/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "severity": "critical",
            },
        )

    async def test_enterprise_alerts_ecosystem(self):
        """The ecosystem filter is forwarded as an "ecosystem" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(self.api.enterprise_alerts("foo", ecosystem="pip"))
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/enterprises/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "ecosystem": "pip",
            },
        )

    async def test_enterprise_alerts_scope(self):
        """The scope filter is forwarded as a "scope" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.enterprise_alerts("foo", scope=DependencyScope.RUNTIME)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/enterprises/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "scope": "runtime",
            },
        )

    async def test_enterprise_alerts_sort(self):
        """A custom sort key replaces the default "created" value."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.enterprise_alerts("foo", sort=AlertSort.UPDATED)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/enterprises/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "updated",
                "direction": "desc",
            },
        )

    async def test_enterprise_alerts_direction(self):
        """A custom sort order replaces the default "desc" direction."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.enterprise_alerts("foo", direction=SortOrder.ASC)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/enterprises/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "asc",
            },
        )

    async def test_organization_alerts(self):
        """Default organization listing uses created/desc ordering."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(self.api.organization_alerts("foo"))
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/dependabot/alerts",
            params={"per_page": "100", "sort": "created", "direction": "desc"},
        )

    async def test_organization_alerts_state(self):
        """The state filter is forwarded as a "state" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.organization_alerts("foo", state=AlertState.FIXED)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "state": "fixed",
            },
        )

    async def test_organization_alerts_severity(self):
        """The severity filter is forwarded as a "severity" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.organization_alerts("foo", severity=Severity.CRITICAL)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "severity": "critical",
            },
        )

    async def test_organization_alerts_ecosystem(self):
        """The ecosystem filter is forwarded as an "ecosystem" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(self.api.organization_alerts("foo", ecosystem="pip"))
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "ecosystem": "pip",
            },
        )

    async def test_organization_alerts_scope(self):
        """The scope filter is forwarded as a "scope" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.organization_alerts("foo", scope=DependencyScope.RUNTIME)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "scope": "runtime",
            },
        )

    async def test_organization_alerts_sort(self):
        """A custom sort key replaces the default "created" value."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.organization_alerts("foo", sort=AlertSort.UPDATED)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "updated",
                "direction": "desc",
            },
        )

    async def test_organization_alerts_direction(self):
        """A custom sort order replaces the default "desc" direction."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(
            self.api.organization_alerts("foo", direction=SortOrder.ASC)
        )
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "asc",
            },
        )

    async def test_alerts(self):
        """Default repository listing uses created/desc ordering."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(self.api.alerts("foo/bar"))
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/dependabot/alerts",
            params={"per_page": "100", "sort": "created", "direction": "desc"},
        )

    async def test_alerts_state(self):
        """The state filter is forwarded as a "state" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(self.api.alerts("foo/bar", state=AlertState.FIXED))
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "state": "fixed",
            },
        )

    async def test_alerts_severity(self):
        """The severity filter is forwarded as a "severity" query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        iterator = aiter(self.api.alerts("foo/bar", severity=Severity.CRITICAL))
        first = await anext(iterator)
        second = await anext(iterator)
        self.assertEqual(first.number, 2)
        self.assertEqual(second.number, 1)

        # Both alerts consumed; the iterator must be exhausted now.
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "severity": "critical",
            },
        )

    async def test_alerts_ecosystem(self):
        """An ecosystem filter is forwarded as the ``ecosystem`` parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS

        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        numbers = [
            alert.number
            async for alert in self.api.alerts("foo/bar", ecosystem="pip")
        ]
        self.assertEqual(numbers, [2, 1])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "ecosystem": "pip",
            },
        )

    async def test_alerts_scope(self):
        """A dependency scope filter is forwarded as the ``scope`` parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS

        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        numbers = [
            alert.number
            async for alert in self.api.alerts(
                "foo/bar", scope=DependencyScope.RUNTIME
            )
        ]
        self.assertEqual(numbers, [2, 1])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                "scope": "runtime",
            },
        )

    async def test_alerts_sort(self):
        """A sort choice replaces the default ``sort`` query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS

        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        numbers = [
            alert.number
            async for alert in self.api.alerts("foo/bar", sort=AlertSort.UPDATED)
        ]
        self.assertEqual(numbers, [2, 1])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "updated",
                "direction": "desc",
            },
        )

    async def test_alerts_direction(self):
        """A sort order replaces the default ``direction`` query parameter."""
        mock_response = create_response()
        mock_response.json.return_value = ALERTS

        self.client.get_all.return_value = AsyncIteratorMock([mock_response])

        numbers = [
            alert.number
            async for alert in self.api.alerts("foo/bar", direction=SortOrder.ASC)
        ]
        self.assertEqual(numbers, [2, 1])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/dependabot/alerts",
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "asc",
            },
        )

    async def test_alert(self):
        """Requesting a single alert GETs the alert endpoint and parses it."""
        # Full Dependabot alert payload; presumably mirrors the example in
        # GitHub's REST API docs — only ``number`` is asserted below.
        response = create_response()
        response.json.return_value = {
            "number": 1,
            "state": "open",
            "dependency": {
                "package": {"ecosystem": "pip", "name": "ansible"},
                "manifest_path": "path/to/requirements.txt",
                "scope": "runtime",
            },
            "security_advisory": {
                "ghsa_id": "GHSA-8f4m-hccc-8qph",
                "cve_id": "CVE-2021-20191",
                "summary": "Insertion of Sensitive Information into Log File in ansible",
                "description": "A flaw was found in ansible. Credentials, such as secrets, are being disclosed in console log by default and not protected by no_log feature when using those modules. An attacker can take advantage of this information to steal those credentials. The highest threat from this vulnerability is to data confidentiality.",
                "vulnerabilities": [
                    {
                        "package": {"ecosystem": "pip", "name": "ansible"},
                        "severity": "medium",
                        "vulnerable_version_range": ">= 2.9.0, < 2.9.18",
                        "first_patched_version": {"identifier": "2.9.18"},
                    },
                    {
                        "package": {"ecosystem": "pip", "name": "ansible"},
                        "severity": "medium",
                        "vulnerable_version_range": "< 2.8.19",
                        "first_patched_version": {"identifier": "2.8.19"},
                    },
                    {
                        "package": {"ecosystem": "pip", "name": "ansible"},
                        "severity": "medium",
                        "vulnerable_version_range": ">= 2.10.0, < 2.10.7",
                        "first_patched_version": {"identifier": "2.10.7"},
                    },
                ],
                "severity": "medium",
                "cvss": {
                    "vector_string": "CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:N/A:N",
                    "score": 5.5,
                },
                "cwes": [
                    {
                        "cwe_id": "CWE-532",
                        "name": "Insertion of Sensitive Information into Log File",
                    }
                ],
                "identifiers": [
                    {"type": "GHSA", "value": "GHSA-8f4m-hccc-8qph"},
                    {"type": "CVE", "value": "CVE-2021-20191"},
                ],
                "references": [
                    {"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-20191"},
                    {
                        "url": "https://access.redhat.com/security/cve/cve-2021-20191"
                    },
                    {
                        "url": "https://bugzilla.redhat.com/show_bug.cgi?id=1916813"
                    },
                ],
                "published_at": "2021-06-01T17:38:00Z",
                "updated_at": "2021-08-12T23:06:00Z",
                "withdrawn_at": None,
            },
            "security_vulnerability": {
                "package": {"ecosystem": "pip", "name": "ansible"},
                "severity": "medium",
                "vulnerable_version_range": "< 2.8.19",
                "first_patched_version": {"identifier": "2.8.19"},
            },
            "url": "https://api.github.com/repos/octocat/hello-world/dependabot/alerts/1",
            "html_url": "https://github.com/octocat/hello-world/security/dependabot/1",
            "created_at": "2022-06-14T15:21:52Z",
            "updated_at": "2022-06-14T15:21:52Z",
            "dismissed_at": None,
            "dismissed_by": None,
            "dismissed_reason": None,
            "dismissed_comment": None,
            "fixed_at": None,
        }
        self.client.get.return_value = response

        alert = await self.api.alert(
            "foo/bar",
            1,
        )

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/dependabot/alerts/1",
        )

        self.assertEqual(alert.number, 1)

    async def test_update(self):
        """Dismissing an alert PATCHes state, reason and comment."""
        # Same alert payload shape as in test_alert; the PATCH response is
        # parsed back into a model and only ``number`` is asserted.
        response = create_response()
        response.json.return_value = {
            "number": 1,
            "state": "open",
            "dependency": {
                "package": {"ecosystem": "pip", "name": "ansible"},
                "manifest_path": "path/to/requirements.txt",
                "scope": "runtime",
            },
            "security_advisory": {
                "ghsa_id": "GHSA-8f4m-hccc-8qph",
                "cve_id": "CVE-2021-20191",
                "summary": "Insertion of Sensitive Information into Log File in ansible",
                "description": "A flaw was found in ansible. Credentials, such as secrets, are being disclosed in console log by default and not protected by no_log feature when using those modules. An attacker can take advantage of this information to steal those credentials. The highest threat from this vulnerability is to data confidentiality.",
                "vulnerabilities": [
                    {
                        "package": {"ecosystem": "pip", "name": "ansible"},
                        "severity": "medium",
                        "vulnerable_version_range": ">= 2.9.0, < 2.9.18",
                        "first_patched_version": {"identifier": "2.9.18"},
                    },
                    {
                        "package": {"ecosystem": "pip", "name": "ansible"},
                        "severity": "medium",
                        "vulnerable_version_range": "< 2.8.19",
                        "first_patched_version": {"identifier": "2.8.19"},
                    },
                    {
                        "package": {"ecosystem": "pip", "name": "ansible"},
                        "severity": "medium",
                        "vulnerable_version_range": ">= 2.10.0, < 2.10.7",
                        "first_patched_version": {"identifier": "2.10.7"},
                    },
                ],
                "severity": "medium",
                "cvss": {
                    "vector_string": "CVSS:3.1/AV:L/AC:L/PR:L/UI:N/S:U/C:H/I:N/A:N",
                    "score": 5.5,
                },
                "cwes": [
                    {
                        "cwe_id": "CWE-532",
                        "name": "Insertion of Sensitive Information into Log File",
                    }
                ],
                "identifiers": [
                    {"type": "GHSA", "value": "GHSA-8f4m-hccc-8qph"},
                    {"type": "CVE", "value": "CVE-2021-20191"},
                ],
                "references": [
                    {"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-20191"},
                    {
                        "url": "https://access.redhat.com/security/cve/cve-2021-20191"
                    },
                    {
                        "url": "https://bugzilla.redhat.com/show_bug.cgi?id=1916813"
                    },
                ],
                "published_at": "2021-06-01T17:38:00Z",
                "updated_at": "2021-08-12T23:06:00Z",
                "withdrawn_at": None,
            },
            "security_vulnerability": {
                "package": {"ecosystem": "pip", "name": "ansible"},
                "severity": "medium",
                "vulnerable_version_range": "< 2.8.19",
                "first_patched_version": {"identifier": "2.8.19"},
            },
            "url": "https://api.github.com/repos/octocat/hello-world/dependabot/alerts/1",
            "html_url": "https://github.com/octocat/hello-world/security/dependabot/1",
            "created_at": "2022-06-14T15:21:52Z",
            "updated_at": "2022-06-14T15:21:52Z",
            "dismissed_at": None,
            "dismissed_by": None,
            "dismissed_reason": None,
            "dismissed_comment": None,
            "fixed_at": None,
        }
        self.client.patch.return_value = response

        alert = await self.api.update_alert(
            "foo/bar",
            1,
            AlertState.DISMISSED,
            dismissed_reason=DismissedReason.NOT_USED,
            dismissed_comment="Dependency is not used.",
        )

        self.client.patch.assert_awaited_once_with(
            "/repos/foo/bar/dependabot/alerts/1",
            data={
                "state": "dismissed",
                "dismissed_reason": "not_used",
                "dismissed_comment": "Dependency is not used.",
            },
        )

        self.assertEqual(alert.number, 1)
pontos-25.3.2/tests/github/api/test_labels.py000066400000000000000000000046461476255566300212320ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=redefined-builtin

from unittest.mock import MagicMock

import httpx

from pontos.github.api.labels import GitHubAsyncRESTLabels
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response


class GitHubAsyncRESTLabelsTestCase(GitHubAsyncRESTTestCase):
    """Unit tests for the GitHubAsyncRESTLabels API wrapper."""

    api_cls = GitHubAsyncRESTLabels

    async def test_get_all(self):
        """Label names from all result pages are yielded in order."""
        page_one = create_response()
        page_one.json.return_value = [{"id": 1, "name": "a"}]
        page_two = create_response()
        page_two.json.return_value = [
            {"id": 2, "name": "b"},
            {"id": 3, "name": "c"},
        ]

        self.client.get_all.return_value = AsyncIteratorMock(
            [page_one, page_two]
        )

        collected = [
            label async for label in self.api.get_all("foo/bar", 123)
        ]
        self.assertEqual(collected, ["a", "b", "c"])

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/issues/123/labels",
            params={"per_page": "100"},
        )

    async def test_delete_all(self):
        """Deleting all labels issues a single DELETE request."""
        self.client.delete.return_value = create_response()

        await self.api.delete_all("foo/bar", 123)

        self.client.delete.assert_awaited_once_with(
            "/repos/foo/bar/issues/123/labels"
        )

    async def test_set_all(self):
        """Setting labels POSTs the complete label list."""
        self.client.post.return_value = create_response()

        await self.api.set_all("foo/bar", 123, ["a", "b"])

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/issues/123/labels", data={"labels": ["a", "b"]}
        )

    async def test_set_labels_failure(self):
        """HTTP errors raised by the client propagate to the caller."""
        error_response = create_response()
        self.client.post.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=error_response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.set_all("foo/bar", 123, ["a", "b"])

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/issues/123/labels", data={"labels": ["a", "b"]}
        )
pontos-25.3.2/tests/github/api/test_organizations.py000066400000000000000000000452641476255566300226600ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=too-many-lines, redefined-builtin, line-too-long

from copy import deepcopy
from pathlib import Path
from unittest.mock import MagicMock

import httpx

from pontos.github.api.errors import GitHubApiError
from pontos.github.api.organizations import (
    GitHubAsyncRESTOrganizations,
    InvitationRole,
    MemberFilter,
    MemberRole,
)
from pontos.github.models.organization import RepositoryType
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

here = Path(__file__).parent  # directory containing this test module

# Repository payload modelled after the GitHub REST API "octocat/Hello-World"
# example; tests deep-copy it and adjust the "id" field for multi-page data.
REPOSITORY_DICT = {
    "id": 1,
    "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
    "name": "Hello-World",
    "full_name": "octocat/Hello-World",
    "owner": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "private": False,
    "html_url": "https://github.com/octocat/Hello-World",
    "description": "This your first repo!",
    "fork": False,
    "url": "https://api.github.com/repos/octocat/Hello-World",
    "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
    "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
    "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
    "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
    "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
    "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
    "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
    "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
    "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
    "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
    "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
    "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
    "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
    "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
    "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
    "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
    "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
    "git_url": "git:github.com/octocat/Hello-World.git",
    "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
    "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
    "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
    "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
    "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
    "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
    "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
    "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
    "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
    "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
    "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
    "ssh_url": "git@github.com:octocat/Hello-World.git",
    "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
    "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
    "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
    "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
    "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
    "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
    "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
    "clone_url": "https://github.com/octocat/Hello-World.git",
    "mirror_url": "git:git.example.com/octocat/Hello-World",
    "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
    "svn_url": "https://svn.github.com/octocat/Hello-World",
    "homepage": "https://github.com",
    "language": None,
    "forks_count": 9,
    "stargazers_count": 80,
    "watchers_count": 80,
    "size": 108,
    "default_branch": "master",
    "open_issues_count": 0,
    "is_template": False,
    "topics": ["octocat", "atom", "electron", "api"],
    "has_issues": True,
    "has_projects": True,
    "has_wiki": True,
    "has_pages": False,
    "has_downloads": True,
    "has_discussions": False,
    "archived": False,
    "disabled": False,
    "visibility": "public",
    "pushed_at": "2011-01-26T19:06:43Z",
    "created_at": "2011-01-26T19:01:12Z",
    "updated_at": "2011-01-26T19:14:43Z",
    "permissions": {"admin": False, "push": False, "pull": True},
    "forks": 1,
    "open_issues": 0,
    "watchers": 1,
}

# Organization member/user payload modelled after the GitHub REST API
# "octocat" example; tests deep-copy it and adjust the "id" field.
MEMBER_DICT = {
    "id": 1,
    "login": "octocat",
    "node_id": "MDQ6VXNlcjE=",
    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
    "gravatar_id": "",
    "url": "https://api.github.com/users/octocat",
    "html_url": "https://github.com/octocat",
    "followers_url": "https://api.github.com/users/octocat/followers",
    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
    "organizations_url": "https://api.github.com/users/octocat/orgs",
    "repos_url": "https://api.github.com/users/octocat/repos",
    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
    "received_events_url": "https://api.github.com/users/octocat/received_events",
    "type": "User",
    "site_admin": False,
}


class GitHubAsyncRESTOrganizationsTestCase(GitHubAsyncRESTTestCase):
    """Unit tests for the GitHubAsyncRESTOrganizations API wrapper."""

    api_cls = GitHubAsyncRESTOrganizations

    async def test_exists(self):
        """A successful GET /orgs/{org} reports the organization as existing."""
        response = create_response(is_success=True)
        self.client.get.return_value = response

        self.assertTrue(await self.api.exists("foo"))

        self.client.get.assert_awaited_once_with("/orgs/foo")

    async def test_not_exists(self):
        """An unsuccessful GET /orgs/{org} reports the organization as missing."""
        response = create_response(is_success=False)
        self.client.get.return_value = response

        self.assertFalse(await self.api.exists("foo"))

        self.client.get.assert_awaited_once_with("/orgs/foo")

    async def test_get_repositories(self):
        """Repositories from all pages are yielded; default type is "all"."""
        response1 = create_response()
        response1.json.return_value = [REPOSITORY_DICT]
        response2 = create_response()
        repository_dict2 = deepcopy(REPOSITORY_DICT)
        repository_dict2["id"] = 2
        repository_dict3 = deepcopy(REPOSITORY_DICT)
        repository_dict3["id"] = 3
        response2.json.return_value = [repository_dict2, repository_dict3]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.get_repositories("foo"))
        repo = await anext(async_it)
        self.assertEqual(repo.id, 1)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 2)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/repos",
            params={"per_page": "100", "type": "all"},
        )

    async def test_get_private_repositories(self):
        """A repository type filter is forwarded as the "type" parameter."""
        response1 = create_response()
        response1.json.return_value = [REPOSITORY_DICT]
        response2 = create_response()
        repository_dict2 = deepcopy(REPOSITORY_DICT)
        repository_dict2["id"] = 2
        repository_dict3 = deepcopy(REPOSITORY_DICT)
        repository_dict3["id"] = 3
        response2.json.return_value = [repository_dict2, repository_dict3]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(
            self.api.get_repositories(
                "foo", repository_type=RepositoryType.PRIVATE
            )
        )
        repo = await anext(async_it)
        self.assertEqual(repo.id, 1)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 2)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/repos",
            params={"per_page": "100", "type": "private"},
        )

    async def test_members(self):
        """Members from all pages are yielded; default filter/role is "all"."""
        response1 = create_response()
        response1.json.return_value = [MEMBER_DICT]
        response2 = create_response()
        member_dict2 = deepcopy(MEMBER_DICT)
        member_dict2["id"] = 2
        member_dict3 = deepcopy(MEMBER_DICT)
        member_dict3["id"] = 3
        response2.json.return_value = [member_dict2, member_dict3]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.members("foo"))
        member = await anext(async_it)
        self.assertEqual(member.id, 1)
        member = await anext(async_it)
        self.assertEqual(member.id, 2)
        member = await anext(async_it)
        self.assertEqual(member.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/members",
            params={"per_page": "100", "filter": "all", "role": "all"},
        )

    async def test_members_admins(self):
        """A member role filter is forwarded as the "role" parameter."""
        response1 = create_response()
        response1.json.return_value = [MEMBER_DICT]
        response2 = create_response()
        member_dict2 = deepcopy(MEMBER_DICT)
        member_dict2["id"] = 2
        member_dict3 = deepcopy(MEMBER_DICT)
        member_dict3["id"] = 3
        response2.json.return_value = [member_dict2, member_dict3]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.members("foo", role=MemberRole.ADMIN))
        member = await anext(async_it)
        self.assertEqual(member.id, 1)
        member = await anext(async_it)
        self.assertEqual(member.id, 2)
        member = await anext(async_it)
        self.assertEqual(member.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/members",
            params={"per_page": "100", "filter": "all", "role": "admin"},
        )

    async def test_members_filter(self):
        """A member filter is forwarded as the "filter" parameter."""
        response1 = create_response()
        response1.json.return_value = [MEMBER_DICT]
        response2 = create_response()
        member_dict2 = deepcopy(MEMBER_DICT)
        member_dict2["id"] = 2
        member_dict3 = deepcopy(MEMBER_DICT)
        member_dict3["id"] = 3
        response2.json.return_value = [member_dict2, member_dict3]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(
            self.api.members("foo", member_filter=MemberFilter.TWO_FA_DISABLED)
        )
        member = await anext(async_it)
        self.assertEqual(member.id, 1)
        member = await anext(async_it)
        self.assertEqual(member.id, 2)
        member = await anext(async_it)
        self.assertEqual(member.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/members",
            params={"per_page": "100", "filter": "2fa_disabled", "role": "all"},
        )

    async def test_invite_email(self):
        """Inviting by email POSTs the email with the default role."""
        response = create_response(is_success=False)
        self.client.post.return_value = response

        await self.api.invite(
            "foo",
            email="foo@bar.com",
        )

        self.client.post.assert_awaited_once_with(
            "/orgs/foo/invitations",
            data={"role": "direct_member", "email": "foo@bar.com"},
        )

    async def test_invite_invitee(self):
        """Inviting by invitee id POSTs the id with the default role."""
        response = create_response(is_success=False)
        self.client.post.return_value = response

        await self.api.invite(
            "foo",
            invitee_id="foo",
        )

        self.client.post.assert_awaited_once_with(
            "/orgs/foo/invitations",
            data={"role": "direct_member", "invitee_id": "foo"},
        )

    async def test_invite_missing_user(self):
        """Inviting without email and invitee id raises a GitHubApiError."""
        response = create_response(is_success=False)
        self.client.post.return_value = response

        with self.assertRaises(GitHubApiError):
            await self.api.invite("foo")

    async def test_invite_with_teams(self):
        """Team ids are forwarded as a list in the invitation payload."""
        response = create_response(is_success=False)
        self.client.post.return_value = response

        await self.api.invite("foo", email="foo@bar.com", team_ids=("1", "2"))

        self.client.post.assert_awaited_once_with(
            "/orgs/foo/invitations",
            data={
                "role": "direct_member",
                "email": "foo@bar.com",
                "team_ids": ["1", "2"],
            },
        )

    async def test_invite_with_role(self):
        """An explicit invitation role replaces the default role."""
        response = create_response(is_success=False)
        self.client.post.return_value = response

        await self.api.invite(
            "foo", email="foo@bar.com", role=InvitationRole.ADMIN
        )

        self.client.post.assert_awaited_once_with(
            "/orgs/foo/invitations",
            data={
                "role": "admin",
                "email": "foo@bar.com",
            },
        )

    async def test_remove_member(self):
        """Removing a member issues a DELETE on the membership endpoint."""
        response = create_response(is_success=False)
        self.client.delete.return_value = response

        await self.api.remove_member("foo", "bar")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/memberships/bar",
        )

    async def test_remove_member_failure(self):
        """HTTP errors while removing a member propagate to the caller."""
        response = create_response()
        self.client.delete.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.remove_member("foo", "bar")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/memberships/bar",
        )

    async def test_outside_collaborators(self):
        """Outside collaborators from all pages are yielded; filter "all"."""
        response1 = create_response()
        response1.json.return_value = [MEMBER_DICT]
        response2 = create_response()
        member_dict2 = deepcopy(MEMBER_DICT)
        member_dict2["id"] = 2
        member_dict3 = deepcopy(MEMBER_DICT)
        member_dict3["id"] = 3
        response2.json.return_value = [member_dict2, member_dict3]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.outside_collaborators("foo"))
        member = await anext(async_it)
        self.assertEqual(member.id, 1)
        member = await anext(async_it)
        self.assertEqual(member.id, 2)
        member = await anext(async_it)
        self.assertEqual(member.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/outside_collaborators",
            params={"per_page": "100", "filter": "all"},
        )

    async def test_outside_collaborators_filter(self):
        """A member filter is forwarded when listing outside collaborators."""
        response1 = create_response()
        response1.json.return_value = [MEMBER_DICT]
        response2 = create_response()
        member_dict2 = deepcopy(MEMBER_DICT)
        member_dict2["id"] = 2
        member_dict3 = deepcopy(MEMBER_DICT)
        member_dict3["id"] = 3
        response2.json.return_value = [member_dict2, member_dict3]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(
            self.api.outside_collaborators(
                "foo", member_filter=MemberFilter.TWO_FA_DISABLED
            )
        )
        member = await anext(async_it)
        self.assertEqual(member.id, 1)
        member = await anext(async_it)
        self.assertEqual(member.id, 2)
        member = await anext(async_it)
        self.assertEqual(member.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/outside_collaborators",
            params={"per_page": "100", "filter": "2fa_disabled"},
        )

    async def test_remove_outside_collaborator(self):
        """Removing an outside collaborator issues a DELETE request."""
        response = create_response(is_success=False)
        self.client.delete.return_value = response

        await self.api.remove_outside_collaborator("foo", "bar")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/outside_collaborators/bar",
        )

    async def test_remove_outside_collaborator_failure(self):
        """HTTP errors while removing an outside collaborator propagate."""
        response = create_response()
        self.client.delete.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.remove_outside_collaborator("foo", "bar")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/outside_collaborators/bar",
        )
pontos-25.3.2/tests/github/api/test_packages.py000066400000000000000000000221741476255566300215420ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

from pontos.github.api.packages import GitHubAsyncRESTPackages
from pontos.github.models.packages import (
    Package,
    PackageType,
    PackageVersion,
    PackageVisibility,
)
from pontos.testing import AsyncIteratorMock
from tests import aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

from .test_organizations import MEMBER_DICT, REPOSITORY_DICT

# Canonical package-version payload as returned by GitHub's
# "Get a package version for an organization" REST endpoint.
# Reused (and shallow-copied) by several test cases in the test class below.
PACKAGE_VERSION = {
    "id": 1,
    "name": "v1.0.0",
    "url": "https://api.github.com/orgs/foo/packages/container/bar/versions/1",
    "package_html_url": "https://github.com/orgs/foo/packages/container/bar/versions",
    "created_at": "2022-01-01T00:00:00Z",
    "updated_at": "2022-01-01T00:00:00Z",
    "html_url": "https://github.com/orgs/foo/packages/container/bar/1",
    "metadata": {
        "package_type": "container",
        "container": {"tags": ["latest"]},
    },
}


class GitHubAsyncRESTPackagesTestCase(GitHubAsyncRESTTestCase):
    """Tests for :class:`GitHubAsyncRESTPackages`.

    The HTTP client is a mock supplied by ``GitHubAsyncRESTTestCase``, so
    each test stubs the client's response(s) first and then verifies both
    the parsed result and the exact request path/params that were issued.
    """

    # the API class under test, instantiated by the base test case as self.api
    api_cls = GitHubAsyncRESTPackages

    async def test_exists(self):
        """``exists`` returns True when GET on the package URL succeeds."""
        response = create_response(is_success=True)
        self.client.get.return_value = response

        self.assertTrue(
            await self.api.exists(
                organization="foo",
                package_type=PackageType.CONTAINER,
                package_name="bar",
            )
        )

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar"
        )

    async def test_package(self):
        """``package`` fetches a single package and parses it into a model."""
        response = create_response()
        response.json.return_value = {
            "id": 1,
            "name": "bar",
            "package_type": "container",
            "owner": MEMBER_DICT,
            "version_count": 1,
            "visibility": "public",
            "url": "https://api.github.com/orgs/foo/packages/container/bar",
            "tags": ["foo", "bar", "baz"],
            "created_at": "2022-01-01T00:00:00Z",
            "updated_at": "2022-01-01T00:00:00Z",
            "repository": REPOSITORY_DICT,
            "html_url": "https://github.com/orgs/foo/packages/container/repo/bar",
        }

        self.client.get.return_value = response

        package = await self.api.package("foo", PackageType.CONTAINER, "bar")

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar"
        )

        # every field of the JSON payload must end up on the Package model
        self.assertIsInstance(package, Package)
        self.assertEqual(package.owner.login, "octocat")
        self.assertEqual(package.name, "bar")
        self.assertEqual(package.version_count, 1)
        self.assertEqual(package.visibility, PackageVisibility.PUBLIC)
        self.assertEqual(
            package.url,
            "https://api.github.com/orgs/foo/packages/container/bar",
        )
        self.assertEqual(package.tags, ["foo", "bar", "baz"])
        self.assertEqual(package.created_at, "2022-01-01T00:00:00Z")
        self.assertEqual(package.updated_at, "2022-01-01T00:00:00Z")
        self.assertEqual(
            package.html_url,
            "https://github.com/orgs/foo/packages/container/repo/bar",
        )

    async def test_packages(self):
        """``packages`` iterates the paginated package listing."""
        response = create_response()
        response.json.return_value = [
            {
                "id": 1,
                "name": "bar",
                "package_type": "container",
                "owner": MEMBER_DICT,
                "version_count": 1,
                "visibility": "public",
                "url": "https://api.github.com/orgs/foo/packages/container/bar",
                "tags": ["foo", "bar", "baz"],
                "created_at": "2022-01-01T00:00:00Z",
                "updated_at": "2022-01-01T00:00:00Z",
                "repository": REPOSITORY_DICT,
                "html_url": "https://github.com/orgs/foo/packages/container/repo/bar",
            }
        ]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(
            self.api.packages("foo", package_type=PackageType.CONTAINER)
        )
        package = await anext(async_it)
        self.assertEqual(package.id, 1)

        # a single stubbed page means the iterator is exhausted after one item
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/packages/container",
            params={"per_page": "100"},
        )

    async def test_package_version(self):
        """``package_version`` fetches one version and parses all fields."""
        response = create_response()
        response.json.return_value = PACKAGE_VERSION

        self.client.get.return_value = response

        package_version: PackageVersion = await self.api.package_version(
            "foo", PackageType.CONTAINER, "bar", 1
        )

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar/versions/1"
        )

        self.assertEqual(package_version.id, 1)
        self.assertEqual(package_version.name, "v1.0.0")
        self.assertEqual(
            package_version.url,
            "https://api.github.com/orgs/foo/packages/container/bar/versions/1",
        )
        self.assertEqual(
            package_version.package_html_url,
            "https://github.com/orgs/foo/packages/container/bar/versions",
        )
        self.assertEqual(package_version.created_at, "2022-01-01T00:00:00Z")
        self.assertEqual(package_version.updated_at, "2022-01-01T00:00:00Z")
        self.assertEqual(
            package_version.html_url,
            "https://github.com/orgs/foo/packages/container/bar/1",
        )
        self.assertEqual(
            package_version.metadata.package_type, PackageType.CONTAINER
        )
        self.assertEqual(package_version.metadata.container.tags, ["latest"])

    async def test_package_version_tags(self):
        """``package_version_tags`` extracts the container tag list only."""
        response = create_response()
        response.json.return_value = {
            "metadata": {"container": {"tags": ["latest", "stable"]}}
        }

        self.client.get.return_value = response

        tags = await self.api.package_version_tags(
            organization="foo",
            package_type=PackageType.CONTAINER,
            package_name="bar",
            version=1,
        )

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar/versions/1"
        )

        self.assertEqual(tags, ["latest", "stable"])

    async def test_delete_package(self):
        """``delete_package`` issues DELETE on the package URL."""
        response = create_response(is_success=True)
        self.client.delete.return_value = response

        await self.api.delete_package(
            organization="foo",
            package_type=PackageType.CONTAINER,
            package_name="bar",
        )

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar"
        )

    async def test_delete_package_version(self):
        """``delete_package_version`` issues DELETE on the version URL."""
        response = create_response(is_success=True)
        self.client.delete.return_value = response

        await self.api.delete_package_version(
            organization="foo",
            package_type=PackageType.CONTAINER,
            package_name="bar",
            version=1,
        )

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar/versions/1"
        )

    async def test_package_versions(self):
        """``package_versions`` yields versions across multiple pages."""
        response1 = create_response()
        response1.json.return_value = [PACKAGE_VERSION]
        response2 = create_response()
        # shallow copy is sufficient: only the top-level "id" key is replaced
        package_version2 = PACKAGE_VERSION.copy()
        package_version2["id"] = 2
        response2.json.return_value = [package_version2]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(
            self.api.package_versions("foo", PackageType.CONTAINER, "bar")
        )
        package_version = await anext(async_it)
        self.assertEqual(package_version.id, 1)
        package_version = await anext(async_it)
        self.assertEqual(package_version.id, 2)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/packages/container/bar/versions"
        )

    async def test_delete_package_with_tag(self):
        """``delete_package_with_tag`` lists versions, reads each version's
        tags and deletes the version carrying the requested tag."""
        response = create_response(is_success=True)
        self.client.delete.return_value = response

        # one page with a single version (id 1, tagged "latest")
        package_version_response = create_response()
        package_version_response.json.return_value = [PACKAGE_VERSION]
        self.client.get_all.return_value = AsyncIteratorMock(
            [package_version_response]
        )

        # per-version tag lookup response
        tags_response = create_response()
        tags_response.json.return_value = {
            "metadata": {"container": {"tags": ["latest", "stable"]}}
        }
        self.client.get.return_value = tags_response

        await self.api.delete_package_with_tag(
            organization="foo",
            package_type=PackageType.CONTAINER,
            package_name="bar",
            tag="latest",
        )

        # list versions -> fetch tags of version 1 -> delete version 1
        self.client.get_all.assert_called_once_with(
            "/orgs/foo/packages/container/bar/versions"
        )
        self.client.get.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar/versions/1"
        )
        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/packages/container/bar/versions/1"
        )
pontos-25.3.2/tests/github/api/test_pull_requests.py000066400000000000000000001610711476255566300226730ustar00rootroot00000000000000# Copyright (C) 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=redefined-builtin, line-too-long, too-many-lines
# ruff: noqa: E501

from pathlib import Path
from unittest.mock import MagicMock

from httpx import HTTPStatusError

from pontos.github.api.pull_requests import GitHubAsyncRESTPullRequests
from pontos.github.models.base import FileStatus
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

here = Path(__file__).parent

PULL_REQUEST_JSON = {
    "url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347",
    "id": 1,
    "node_id": "MDExOlB1bGxSZXF1ZXN0MQ==",
    "html_url": "https://github.com/octocat/Hello-World/pull/1347",
    "diff_url": "https://github.com/octocat/Hello-World/pull/1347.diff",
    "patch_url": "https://github.com/octocat/Hello-World/pull/1347.patch",
    "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347",
    "commits_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits",
    "review_comments_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments",
    "review_comment_url": "https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}",
    "comments_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments",
    "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e",
    "number": 1347,
    "state": "open",
    "locked": True,
    "title": "Amazing new feature",
    "user": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "body": "Please pull these awesome changes in!",
    "labels": [
        {
            "id": 208045946,
            "node_id": "MDU6TGFiZWwyMDgwNDU5NDY=",
            "url": "https://api.github.com/repos/octocat/Hello-World/labels/bug",
            "name": "bug",
            "description": "Something isn't working",
            "color": "f29513",
            "default": True,
        }
    ],
    "milestone": {
        "url": "https://api.github.com/repos/octocat/Hello-World/milestones/1",
        "html_url": "https://github.com/octocat/Hello-World/milestones/v1.0",
        "labels_url": "https://api.github.com/repos/octocat/Hello-World/milestones/1/labels",
        "id": 1002604,
        "node_id": "MDk6TWlsZXN0b25lMTAwMjYwNA==",
        "number": 1,
        "state": "open",
        "title": "v1.0",
        "description": "Tracking milestone for version 1.0",
        "creator": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "open_issues": 4,
        "closed_issues": 8,
        "created_at": "2011-04-10T20:09:31Z",
        "updated_at": "2014-03-03T18:58:10Z",
        "closed_at": "2013-02-12T13:22:01Z",
        "due_on": "2012-10-09T23:39:01Z",
    },
    "active_lock_reason": "too heated",
    "created_at": "2011-01-26T19:01:12Z",
    "updated_at": "2011-01-26T19:01:12Z",
    "closed_at": "2011-01-26T19:01:12Z",
    "merged_at": "2011-01-26T19:01:12Z",
    "merge_commit_sha": "e5bd3914e2e596debea16f433f57875b5b90bcd6",
    "assignee": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "assignees": [
        {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        {
            "login": "hubot",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/hubot_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/hubot",
            "html_url": "https://github.com/hubot",
            "followers_url": "https://api.github.com/users/hubot/followers",
            "following_url": "https://api.github.com/users/hubot/following{/other_user}",
            "gists_url": "https://api.github.com/users/hubot/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/hubot/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/hubot/subscriptions",
            "organizations_url": "https://api.github.com/users/hubot/orgs",
            "repos_url": "https://api.github.com/users/hubot/repos",
            "events_url": "https://api.github.com/users/hubot/events{/privacy}",
            "received_events_url": "https://api.github.com/users/hubot/received_events",
            "type": "User",
            "site_admin": True,
        },
    ],
    "requested_reviewers": [
        {
            "login": "other_user",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/other_user_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/other_user",
            "html_url": "https://github.com/other_user",
            "followers_url": "https://api.github.com/users/other_user/followers",
            "following_url": "https://api.github.com/users/other_user/following{/other_user}",
            "gists_url": "https://api.github.com/users/other_user/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/other_user/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/other_user/subscriptions",
            "organizations_url": "https://api.github.com/users/other_user/orgs",
            "repos_url": "https://api.github.com/users/other_user/repos",
            "events_url": "https://api.github.com/users/other_user/events{/privacy}",
            "received_events_url": "https://api.github.com/users/other_user/received_events",
            "type": "User",
            "site_admin": False,
        }
    ],
    "requested_teams": [
        {
            "id": 1,
            "node_id": "MDQ6VGVhbTE=",
            "url": "https://api.github.com/teams/1",
            "html_url": "https://github.com/orgs/github/teams/justice-league",
            "name": "Justice League",
            "slug": "justice-league",
            "description": "A great team.",
            "privacy": "closed",
            "permission": "admin",
            "members_url": "https://api.github.com/teams/1/members{/member}",
            "repositories_url": "https://api.github.com/teams/1/repos",
        }
    ],
    "head": {
        "label": "octocat:new-topic",
        "ref": "new-topic",
        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
        "user": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "repo": {
            "id": 1296269,
            "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
            "name": "Hello-World",
            "full_name": "octocat/Hello-World",
            "owner": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "private": False,
            "html_url": "https://github.com/octocat/Hello-World",
            "description": "This your first repo!",
            "fork": False,
            "url": "https://api.github.com/repos/octocat/Hello-World",
            "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
            "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
            "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
            "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
            "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
            "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
            "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
            "git_url": "git:github.com/octocat/Hello-World.git",
            "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
            "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
            "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
            "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
            "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
            "ssh_url": "git@github.com:octocat/Hello-World.git",
            "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
            "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
            "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
            "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
            "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
            "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
            "clone_url": "https://github.com/octocat/Hello-World.git",
            "mirror_url": "git:git.example.com/octocat/Hello-World",
            "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
            "svn_url": "https://svn.github.com/octocat/Hello-World",
            "homepage": "https://github.com",
            "language": None,
            "forks_count": 9,
            "stargazers_count": 80,
            "watchers_count": 80,
            "size": 108,
            "default_branch": "master",
            "open_issues_count": 0,
            "topics": ["octocat", "atom", "electron", "api"],
            "has_issues": True,
            "has_projects": True,
            "has_wiki": True,
            "has_pages": False,
            "has_downloads": True,
            "has_discussions": False,
            "archived": False,
            "disabled": False,
            "pushed_at": "2011-01-26T19:06:43Z",
            "created_at": "2011-01-26T19:01:12Z",
            "updated_at": "2011-01-26T19:14:43Z",
            "permissions": {
                "admin": False,
                "push": False,
                "pull": True,
            },
            "allow_rebase_merge": True,
            "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
            "allow_squash_merge": True,
            "allow_merge_commit": True,
            "allow_forking": True,
            "forks": 123,
            "open_issues": 123,
            "license": {
                "key": "mit",
                "name": "MIT License",
                "url": "https://api.github.com/licenses/mit",
                "spdx_id": "MIT",
                "node_id": "MDc6TGljZW5zZW1pdA==",
            },
            "watchers": 123,
        },
    },
    "base": {
        "label": "octocat:master",
        "ref": "master",
        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
        "user": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "repo": {
            "id": 1296269,
            "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
            "name": "Hello-World",
            "full_name": "octocat/Hello-World",
            "owner": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "private": False,
            "html_url": "https://github.com/octocat/Hello-World",
            "description": "This your first repo!",
            "fork": False,
            "url": "https://api.github.com/repos/octocat/Hello-World",
            "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
            "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
            "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
            "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
            "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
            "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
            "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
            "git_url": "git:github.com/octocat/Hello-World.git",
            "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
            "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
            "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
            "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
            "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
            "ssh_url": "git@github.com:octocat/Hello-World.git",
            "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
            "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
            "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
            "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
            "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
            "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
            "clone_url": "https://github.com/octocat/Hello-World.git",
            "mirror_url": "git:git.example.com/octocat/Hello-World",
            "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
            "svn_url": "https://svn.github.com/octocat/Hello-World",
            "homepage": "https://github.com",
            "language": None,
            "forks_count": 9,
            "stargazers_count": 80,
            "watchers_count": 80,
            "size": 108,
            "default_branch": "master",
            "open_issues_count": 0,
            "topics": ["octocat", "atom", "electron", "api"],
            "has_issues": True,
            "has_projects": True,
            "has_wiki": True,
            "has_pages": False,
            "has_downloads": True,
            "has_discussions": False,
            "archived": False,
            "disabled": False,
            "pushed_at": "2011-01-26T19:06:43Z",
            "created_at": "2011-01-26T19:01:12Z",
            "updated_at": "2011-01-26T19:14:43Z",
            "permissions": {
                "admin": False,
                "push": False,
                "pull": True,
            },
            "allow_rebase_merge": True,
            "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
            "allow_squash_merge": True,
            "allow_merge_commit": True,
            "forks": 123,
            "open_issues": 123,
            "license": {
                "key": "mit",
                "name": "MIT License",
                "url": "https://api.github.com/licenses/mit",
                "spdx_id": "MIT",
                "node_id": "MDc6TGljZW5zZW1pdA==",
            },
            "watchers": 123,
        },
    },
    "_links": {
        "self": {
            "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347"
        },
        "html": {"href": "https://github.com/octocat/Hello-World/pull/1347"},
        "issue": {
            "href": "https://api.github.com/repos/octocat/Hello-World/issues/1347"
        },
        "comments": {
            "href": "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments"
        },
        "review_comments": {
            "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments"
        },
        "review_comment": {
            "href": "https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}"
        },
        "commits": {
            "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits"
        },
        "statuses": {
            "href": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e"
        },
    },
    "author_association": "OWNER",
    "auto_merge": None,
    "draft": False,
    "merged": False,
    "mergeable": True,
    "rebaseable": True,
    "mergeable_state": "clean",
    "merged_by": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "comments": 10,
    "review_comments": 0,
    "maintainer_can_modify": True,
    "commits": 3,
    "additions": 100,
    "deletions": 3,
    "changed_files": 5,
}

# Canned issue-comment payload mirroring what the GitHub REST API returns
# for a single comment; used as a mock response body in the comment tests.
COMMENT_JSON = {
    "id": 1,
    "node_id": "MDEyOklzc3VlQ29tbWVudDE=",
    "url": "https://api.github.com/repos/octocat/Hello-World/issues/comments/1",
    "html_url": "https://github.com/octocat/Hello-World/issues/1347#issuecomment-1",
    "body": "Me too",
    "user": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "created_at": "2011-04-14T16:00:49Z",
    "updated_at": "2011-04-14T16:00:49Z",
    "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347",
    "author_association": "COLLABORATOR",
}


class GitHubAsyncRESTPullRequestsTestCase(GitHubAsyncRESTTestCase):
    api_cls = GitHubAsyncRESTPullRequests

    async def test_exists(self):
        """A successful GET on the PR endpoint means the PR exists."""
        self.client.get.return_value = create_response(is_success=True)

        exists = await self.api.exists("foo/bar", 123)

        self.assertTrue(exists)
        self.client.get.assert_awaited_once_with("/repos/foo/bar/pulls/123")

    async def test_not_exists(self):
        """An unsuccessful GET on the PR endpoint means the PR is absent."""
        self.client.get.return_value = create_response(is_success=False)

        exists = await self.api.exists("foo/bar", 123)

        self.assertFalse(exists)
        self.client.get.assert_awaited_once_with("/repos/foo/bar/pulls/123")

    async def test_commits(self):
        """Commits from all result pages are yielded as one async iterator.

        Two mocked pages are returned by ``client.get_all``: the first
        holds one commit, the second holds two.  The test walks the
        iterator, checks each SHA in order, and verifies the request
        parameters.  The payloads mirror the GitHub REST API commit
        schema.
        """
        # First page: a single commit (sha ending in ...db51).
        response1 = create_response()
        response1.json.return_value = [
            {
                "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "sha": "6dcb09b5b57875f334f61aebed695e2e4193db51",
                "node_id": "MDY6Q29tbWl0NmRjYjA5YjViNTc4NzVmMzM0ZjYxYWViZWQ2OTVlMmU0MTkzZGI1ZQ==",
                "html_url": "https://github.com/octocat/Hello-World/commit/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/comments",
                "commit": {
                    "url": "https://api.github.com/repos/octocat/Hello-World/git/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    "author": {
                        "name": "Monalisa Octocat",
                        "email": "support@github.com",
                        "date": "2011-04-14T16:00:49Z",
                    },
                    "committer": {
                        "name": "Monalisa Octocat",
                        "email": "support@github.com",
                        "date": "2011-04-14T16:00:49Z",
                    },
                    "message": "Fix all the bugs",
                    "tree": {
                        "url": "https://api.github.com/repos/octocat/Hello-World/tree/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    },
                    "comment_count": 0,
                    "verification": {
                        "verified": False,
                        "reason": "unsigned",
                        "signature": None,
                        "payload": None,
                    },
                },
                "author": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "committer": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "parents": [
                    {
                        "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    }
                ],
            }
        ]
        # Second page: two commits (shas ending in ...db52 and ...db53).
        response2 = create_response()
        response2.json.return_value = [
            {
                "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "sha": "6dcb09b5b57875f334f61aebed695e2e4193db52",
                "node_id": "MDY6Q29tbWl0NmRjYjA5YjViNTc4NzVmMzM0ZjYxYWViZWQ2OTVlMmU0MTkzZGI1ZQ==",
                "html_url": "https://github.com/octocat/Hello-World/commit/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/comments",
                "commit": {
                    "url": "https://api.github.com/repos/octocat/Hello-World/git/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    "author": {
                        "name": "Monalisa Octocat",
                        "email": "support@github.com",
                        "date": "2011-04-14T16:00:49Z",
                    },
                    "committer": {
                        "name": "Monalisa Octocat",
                        "email": "support@github.com",
                        "date": "2011-04-14T16:00:49Z",
                    },
                    "message": "Fix all the bugs",
                    "tree": {
                        "url": "https://api.github.com/repos/octocat/Hello-World/tree/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    },
                    "comment_count": 0,
                    "verification": {
                        "verified": False,
                        "reason": "unsigned",
                        "signature": None,
                        "payload": None,
                    },
                },
                "author": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "committer": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "parents": [
                    {
                        "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    }
                ],
            },
            {
                "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "sha": "6dcb09b5b57875f334f61aebed695e2e4193db53",
                "node_id": "MDY6Q29tbWl0NmRjYjA5YjViNTc4NzVmMzM0ZjYxYWViZWQ2OTVlMmU0MTkzZGI1ZQ==",  # noqa: E501
                "html_url": "https://github.com/octocat/Hello-World/commit/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e/comments",
                "commit": {
                    "url": "https://api.github.com/repos/octocat/Hello-World/git/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    "author": {
                        "name": "Monalisa Octocat",
                        "email": "support@github.com",
                        "date": "2011-04-14T16:00:49Z",
                    },
                    "committer": {
                        "name": "Monalisa Octocat",
                        "email": "support@github.com",
                        "date": "2011-04-14T16:00:49Z",
                    },
                    "message": "Fix all the bugs",
                    "tree": {
                        "url": "https://api.github.com/repos/octocat/Hello-World/tree/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    },
                    "comment_count": 0,
                    "verification": {
                        "verified": False,
                        "reason": "unsigned",
                        "signature": None,
                        "payload": None,
                    },
                },
                "author": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "committer": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "parents": [
                    {
                        "url": "https://api.github.com/repos/octocat/Hello-World/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                        "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    }
                ],
            },
        ]

        # get_all is expected to return an async iterator over pages.
        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        # The three commits should be yielded in page order and exhaust
        # afterwards.
        async_it = aiter(self.api.commits("foo/bar", 123))
        commit = await anext(async_it)
        self.assertEqual(commit.sha, "6dcb09b5b57875f334f61aebed695e2e4193db51")
        commit = await anext(async_it)
        self.assertEqual(commit.sha, "6dcb09b5b57875f334f61aebed695e2e4193db52")
        commit = await anext(async_it)
        self.assertEqual(commit.sha, "6dcb09b5b57875f334f61aebed695e2e4193db53")

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        # The API must request the maximum page size of 100 entries.
        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/pulls/123/commits",
            params={"per_page": "100"},
        )

    async def test_create(self):
        """Creating a PR posts head/base/title/body and parses the response."""
        post_response = create_response()
        post_response.json.return_value = PULL_REQUEST_JSON
        self.client.post.return_value = post_response

        pull_request = await self.api.create(
            "foo/bar",
            head_branch="main",
            base_branch="baz",
            title="Lorem",
            body="Ipsum",
        )

        expected_data = {
            "head": "main",
            "base": "baz",
            "title": "Lorem",
            "body": "Ipsum",
        }
        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/pulls",
            data=expected_data,
        )
        self.assertEqual(pull_request.id, 1)

    async def test_create_failure(self):
        """An HTTP error from the create endpoint propagates to the caller."""
        error = HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.post.side_effect = error

        with self.assertRaises(HTTPStatusError):
            await self.api.create(
                "foo/bar",
                head_branch="main",
                base_branch="baz",
                title="Lorem",
                body="Ipsum",
            )

        expected_data = {
            "head": "main",
            "base": "baz",
            "title": "Lorem",
            "body": "Ipsum",
        }
        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/pulls",
            data=expected_data,
        )

    async def test_update(self):
        """Updating a PR posts only the provided fields to the PR endpoint."""
        post_response = create_response()
        post_response.json.return_value = PULL_REQUEST_JSON
        self.client.post.return_value = post_response

        pull_request = await self.api.update(
            "foo/bar",
            123,
            base_branch="baz",
            title="Lorem",
            body="Ipsum",
        )

        expected_data = {
            "base": "baz",
            "title": "Lorem",
            "body": "Ipsum",
        }
        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/pulls/123",
            data=expected_data,
        )
        self.assertEqual(pull_request.id, 1)

    async def test_update_simple(self):
        """Updating a PR with no fields posts an empty payload."""
        post_response = create_response()
        post_response.json.return_value = PULL_REQUEST_JSON
        self.client.post.return_value = post_response

        pull_request = await self.api.update("foo/bar", 123)

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/pulls/123",
            data={},
        )
        self.assertEqual(pull_request.id, 1)

    async def test_update_failure(self):
        """An HTTP error from the update endpoint propagates to the caller."""
        error = HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.post.side_effect = error

        with self.assertRaises(HTTPStatusError):
            await self.api.update(
                "foo/bar",
                123,
                base_branch="baz",
                title="Lorem",
                body="Ipsum",
            )

        expected_data = {
            "base": "baz",
            "title": "Lorem",
            "body": "Ipsum",
        }
        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/pulls/123",
            data=expected_data,
        )

    async def test_add_comment(self):
        """Adding a comment posts the body to the issue comments endpoint."""
        post_response = create_response()
        post_response.json.return_value = COMMENT_JSON
        self.client.post.return_value = post_response

        comment = await self.api.add_comment("foo/bar", 123, "Lorem Ipsum")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/issues/123/comments",
            data={
                "body": "Lorem Ipsum",
            },
        )
        self.assertEqual(comment.id, 1)

    async def test_add_comment_failure(self):
        """An HTTP error while adding a comment propagates to the caller."""
        error = HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.post.side_effect = error

        with self.assertRaises(HTTPStatusError):
            await self.api.add_comment("foo/bar", 123, "Lorem Ipsum")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/issues/123/comments",
            data={
                "body": "Lorem Ipsum",
            },
        )

    async def test_update_comment(self):
        """Updating a comment posts the new body to the comment endpoint."""
        post_response = create_response()
        post_response.json.return_value = COMMENT_JSON
        self.client.post.return_value = post_response

        comment = await self.api.update_comment("foo/bar", 123, "Lorem Ipsum")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/issues/comments/123",
            data={
                "body": "Lorem Ipsum",
            },
        )
        self.assertEqual(comment.id, 1)

    async def test_update_comment_failure(self):
        """An HTTP error while updating a comment propagates to the caller."""
        error = HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.post.side_effect = error

        with self.assertRaises(HTTPStatusError):
            await self.api.update_comment("foo/bar", 123, "Lorem Ipsum")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/issues/comments/123",
            data={
                "body": "Lorem Ipsum",
            },
        )

    async def test_comments(self):
        """Comments from all result pages are yielded as one async iterator."""
        # Two mocked pages: ids 1 and 2 on the first, id 3 on the second.
        page1 = create_response()
        page1.json.return_value = [
            {
                "id": 1,
                "node_id": "MDEyOklzc3VlQ29tbWVudDE=",
                "url": "https://api.github.com/repos/octocat/Hello-World/issues/comments/1",
                "html_url": "https://github.com/octocat/Hello-World/issues/1347#issuecomment-1",
                "body": "Me too",
                "created_at": "2011-04-14T16:00:49Z",
                "updated_at": "2011-04-14T16:00:49Z",
                "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347",
                "author_association": "COLLABORATOR",
            },
            {
                "id": 2,
                "node_id": "MDEyOklzc3VlQ29tbWVudDE=",
                "url": "https://api.github.com/repos/octocat/Hello-World/issues/comments/2",
                "html_url": "https://github.com/octocat/Hello-World/issues/1347#issuecomment-2",
                "body": "Me too",
                "created_at": "2011-04-14T16:00:49Z",
                "updated_at": "2011-04-14T16:00:49Z",
                "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347",
                "author_association": "COLLABORATOR",
            },
        ]
        page2 = create_response()
        page2.json.return_value = [
            {
                "id": 3,
                "node_id": "MDEyOklzc3VlQ29tbWVudDE=",
                "url": "https://api.github.com/repos/octocat/Hello-World/issues/comments/3",
                "html_url": "https://github.com/octocat/Hello-World/issues/1347#issuecomment-3",
                "body": "Me too",
                "created_at": "2011-04-14T16:00:49Z",
                "updated_at": "2011-04-14T16:00:49Z",
                "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347",
                "author_association": "COLLABORATOR",
            },
        ]

        self.client.get_all.return_value = AsyncIteratorMock([page1, page2])

        comments = aiter(self.api.comments("foo/bar", 123))

        # Comments must come back in page order and then exhaust.
        for expected_id in (1, 2, 3):
            comment = await anext(comments)
            self.assertEqual(comment.id, expected_id)

        with self.assertRaises(StopAsyncIteration):
            await anext(comments)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/issues/123/comments",
            params={"per_page": "100"},
        )

    async def test_comments_failure(self):
        """An HTTP error while paging comments surfaces on first iteration."""
        error = HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.get_all.side_effect = [error]

        comments = aiter(self.api.comments("foo/bar", 123))
        with self.assertRaises(HTTPStatusError):
            await anext(comments)

    async def test_files(self):
        """Pull request files from all pages are grouped by file status."""
        first_page = create_response()
        second_page = create_response()
        first_page.json.return_value = [
            {"filename": "baz", "status": FileStatus.MODIFIED.value}
        ]
        second_page.json.return_value = [
            {"filename": "foo", "status": FileStatus.DELETED.value},
            {"filename": "bar", "status": FileStatus.MODIFIED.value},
        ]
        self.client.get_all.return_value = AsyncIteratorMock(
            [first_page, second_page]
        )

        grouped = await self.api.files("foo/bar", 123)

        # two statuses occur in the mocked pages, ADDED does not
        self.assertEqual(len(grouped), 2)
        self.assertEqual(len(grouped[FileStatus.MODIFIED]), 2)
        self.assertEqual(len(grouped[FileStatus.DELETED]), 1)
        self.assertEqual(len(grouped[FileStatus.ADDED]), 0)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/pulls/123/files",
            params={"per_page": "100"},
        )

    async def test_files_failure(self):
        """An HTTP error while fetching files propagates to the caller."""
        error_response = create_response()
        error = HTTPStatusError(
            "404", request=MagicMock(), response=error_response
        )
        self.client.get_all.side_effect = [error]

        with self.assertRaises(HTTPStatusError):
            await self.api.files("foo/bar", 123)

    async def test_files_with_status_list(self):
        """Only files whose status is in the requested list are returned."""
        first_page = create_response()
        second_page = create_response()
        first_page.json.return_value = [
            {"filename": "baz", "status": FileStatus.MODIFIED.value}
        ]
        second_page.json.return_value = [
            {"filename": "foo", "status": FileStatus.DELETED.value},
            {"filename": "bar", "status": FileStatus.MODIFIED.value},
        ]
        self.client.get_all.return_value = AsyncIteratorMock(
            [first_page, second_page]
        )

        grouped = await self.api.files(
            "foo/bar", 123, status_list=[FileStatus.ADDED, FileStatus.MODIFIED]
        )

        # DELETED is filtered out; no ADDED files exist in the mocked data
        self.assertEqual(len(grouped), 1)
        self.assertEqual(len(grouped[FileStatus.MODIFIED]), 2)
        self.assertEqual(len(grouped[FileStatus.DELETED]), 0)
        self.assertEqual(len(grouped[FileStatus.ADDED]), 0)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/pulls/123/files",
            params={"per_page": "100"},
        )

    async def test_get(self):
        """A single pull request can be requested by its number."""
        pr_response = create_response()
        pr_response.json.return_value = PULL_REQUEST_JSON
        self.client.get.return_value = pr_response

        pull_request = await self.api.get("foo/bar", 1)

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/pulls/1",
        )
        self.assertEqual(pull_request.id, 1)

    async def test_get_failure(self):
        """An HTTP error while requesting a pull request propagates."""
        error_response = create_response()
        self.client.get.side_effect = HTTPStatusError(
            "404", request=MagicMock(), response=error_response
        )

        with self.assertRaises(HTTPStatusError):
            await self.api.get("foo/bar", 123)

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/pulls/123",
        )
pontos-25.3.2/tests/github/api/test_release.py000066400000000000000000000415071476255566300214050ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=redefined-builtin, line-too-long, too-many-lines

from pathlib import Path
from unittest.mock import MagicMock, call

import httpx

from pontos.github.api.release import GitHubAsyncRESTReleases
from tests import AsyncIteratorMock, AsyncMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

# Directory of this test module (presumably for fixture look-ups — no use
# visible in this file; TODO confirm before removing).
here = Path(__file__).parent

# Canned GitHub REST "release" payload (one release with a single uploaded
# asset) used as the mocked JSON response throughout the tests below.
RELEASE_JSON = {
    "url": "https://api.github.com/repos/octocat/Hello-World/releases/1",
    "html_url": "https://github.com/octocat/Hello-World/releases/v1.0.0",
    "assets_url": "https://api.github.com/repos/octocat/Hello-World/releases/1/assets",
    "upload_url": "https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}",
    "tarball_url": "https://api.github.com/repos/octocat/Hello-World/tarball/v1.0.0",
    "zipball_url": "https://api.github.com/repos/octocat/Hello-World/zipball/v1.0.0",
    "discussion_url": "https://github.com/octocat/Hello-World/discussions/90",
    "id": 1,
    "node_id": "MDc6UmVsZWFzZTE=",
    "tag_name": "v1.0.0",
    "target_commitish": "master",
    "name": "v1.0.0",
    "body": "Description of the release",
    "draft": False,
    "prerelease": False,
    "created_at": "2013-02-27T19:35:32Z",
    "published_at": "2013-02-27T19:35:32Z",
    "author": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "assets": [
        {
            "url": "https://api.github.com/repos/octocat/Hello-World/releases/assets/1",
            "browser_download_url": "https://github.com/octocat/Hello-World/releases/download/v1.0.0/example.zip",
            "id": 1,
            "node_id": "MDEyOlJlbGVhc2VBc3NldDE=",
            "name": "example.zip",
            "label": "short description",
            "state": "uploaded",
            "content_type": "application/zip",
            "size": 1024,
            "download_count": 42,
            "created_at": "2013-02-27T19:35:32Z",
            "updated_at": "2013-02-27T19:35:32Z",
            "uploader": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
        }
    ],
}


class GitHubAsyncRESTReleasesTestCase(GitHubAsyncRESTTestCase):
    """Tests for GitHubAsyncRESTReleases against a mocked REST client.

    ``self.client`` and ``self.api`` are provided by the base class
    (mocked client wired into an ``api_cls`` instance).
    """

    # class under test, instantiated by the GitHubAsyncRESTTestCase fixture
    api_cls = GitHubAsyncRESTReleases

    async def test_exists(self):
        """exists() is True when the tag lookup returns a success response."""
        response = create_response(is_success=True)
        self.client.get.return_value = response

        self.assertTrue(await self.api.exists("foo/bar", "v1.2.3"))

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/releases/tags/v1.2.3"
        )

    async def test_not_exists(self):
        """exists() is False when the tag lookup is not successful."""
        response = create_response(is_success=False)
        self.client.get.return_value = response

        self.assertFalse(await self.api.exists("foo/bar", "v1.2.3"))

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/releases/tags/v1.2.3"
        )

    async def test_create(self):
        """create() posts all optional release fields alongside the tag."""
        response = create_response()
        response.json.return_value = RELEASE_JSON
        self.client.post.return_value = response

        release = await self.api.create(
            "foo/bar",
            "v1.2.3",
            body="foo",
            name="baz",
            target_commitish="stable",
        )

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/releases",
            data={
                "tag_name": "v1.2.3",
                "draft": False,
                "prerelease": False,
                "name": "baz",
                "body": "foo",
                "target_commitish": "stable",
            },
        )

        self.assertEqual(release.id, 1)

    async def test_create_simple(self):
        """create() with only a tag posts just the required fields."""
        response = create_response()
        response.json.return_value = RELEASE_JSON
        self.client.post.return_value = response

        release = await self.api.create(
            "foo/bar",
            "v1.2.3",
        )

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/releases",
            data={
                "tag_name": "v1.2.3",
                "draft": False,
                "prerelease": False,
            },
        )

        self.assertEqual(release.id, 1)

    async def test_create_failure(self):
        """An HTTP error from the create POST propagates to the caller."""
        response = create_response()
        self.client.post.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.create(
                "foo/bar",
                "v1.2.3",
                body="foo",
                name="baz",
                target_commitish="stable",
            )

        # the request must still have been attempted with the full payload
        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/releases",
            data={
                "tag_name": "v1.2.3",
                "draft": False,
                "prerelease": False,
                "name": "baz",
                "body": "foo",
                "target_commitish": "stable",
            },
        )

    async def test_get(self):
        """A release can be requested by its tag name."""
        response = create_response()
        response.json.return_value = RELEASE_JSON
        self.client.get.return_value = response

        release = await self.api.get(
            "foo/bar",
            "v1.2.3",
        )

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/releases/tags/v1.2.3",
        )
        self.assertEqual(release.id, 1)

    async def test_get_failure(self):
        """An HTTP error while requesting a release propagates."""
        response = create_response()
        self.client.get.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.get(
                "foo/bar",
                "v1.2.3",
            )

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/releases/tags/v1.2.3",
        )

    async def test_download_release_tarball(self):
        """Downloading the tarball yields (chunk, progress) pairs.

        The mocked headers.get returns 2 (total size), so two one-byte
        chunks report 50% and then 100% progress.
        """
        response = create_response(headers=MagicMock())
        response.headers.get.return_value = 2
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        stream_context = AsyncMock()
        stream_context.__aenter__.return_value = response
        self.client.stream.return_value = stream_context

        async with self.api.download_release_tarball(
            "foo/bar", "v1.2.3"
        ) as download_iterable:
            it = aiter(download_iterable)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

        self.client.stream.assert_called_once_with(
            "https://github.com/foo/bar/archive/refs/tags/v1.2.3.tar.gz"
        )

    async def test_download_release_zip(self):
        """Downloading the zipball streams from the .zip archive URL."""
        response = create_response(headers=MagicMock())
        response.headers.get.return_value = 2
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        stream_context = AsyncMock()
        stream_context.__aenter__.return_value = response
        self.client.stream.return_value = stream_context

        async with self.api.download_release_zip(
            "foo/bar", "v1.2.3"
        ) as download_iterable:
            it = aiter(download_iterable)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

        self.client.stream.assert_called_once_with(
            "https://github.com/foo/bar/archive/refs/tags/v1.2.3.zip"
        )

    async def test_download_release_assets(self):
        """Each asset yields its name and a download context manager.

        Two GET requests are mocked: the release lookup (providing the
        assets_url) and the asset listing itself.
        """
        get_assets_url_response = create_response()
        data = RELEASE_JSON.copy()
        data.update({"assets_url": "https://foo.bar/assets"})
        get_assets_url_response.json.return_value = data
        get_assets_response = create_response()
        get_assets_response.json.return_value = [
            {"browser_download_url": "http://bar", "name": "bar"},
            {"browser_download_url": "http://baz", "name": "baz"},
        ]
        response = create_response(headers=MagicMock())
        response.headers.get.return_value = 2
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        stream_context = AsyncMock()
        stream_context.__aenter__.return_value = response
        self.client.stream.return_value = stream_context
        self.client.get.side_effect = [
            get_assets_url_response,
            get_assets_response,
        ]

        assets_it = aiter(self.api.download_release_assets("foo/bar", "v1.2.3"))

        name, cm = await anext(assets_it)

        self.client.get.assert_has_awaits(
            [
                call("/repos/foo/bar/releases/tags/v1.2.3"),
                call("https://foo.bar/assets"),
            ]
        )

        self.client.stream.assert_called_once_with("http://bar")

        self.assertEqual(name, "bar")

        async with cm as progress_it:
            it = aiter(progress_it)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

        # fresh stream mock for the second asset so the
        # assert_called_once_with below checks only the "baz" download
        self.client.stream.reset_mock()
        response = create_response(headers=MagicMock())
        response.headers.get.return_value = 2
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        stream_context = AsyncMock()
        stream_context.__aenter__.return_value = response
        self.client.stream.return_value = stream_context

        name, cm = await anext(assets_it)

        self.client.stream.assert_called_once_with("http://baz")

        self.assertEqual(name, "baz")

        async with cm as progress_it:
            it = aiter(progress_it)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

        with self.assertRaises(StopAsyncIteration):
            await anext(assets_it)

    async def test_download_release_assets_no_assets_url(self):
        """A release without an assets_url raises a RuntimeError."""
        get_assets_url_response = create_response()
        data = RELEASE_JSON.copy()
        data.update({"assets_url": None})
        get_assets_url_response.json.return_value = data
        self.client.get.return_value = get_assets_url_response
        assets_it = aiter(self.api.download_release_assets("foo/bar", "v1.2.3"))

        with self.assertRaises(RuntimeError):
            await anext(assets_it)

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/releases/tags/v1.2.3"
        )

    async def test_download_release_assets_filter(self):
        """match_pattern limits downloads to matching asset names.

        Pattern "*r" matches "bar" but not "baz", so only one asset is
        yielded.
        """
        get_assets_url_response = create_response()
        data = RELEASE_JSON.copy()
        data.update({"assets_url": "https://foo.bar/assets"})
        get_assets_url_response.json.return_value = data
        get_assets_response = create_response()
        get_assets_response.json.return_value = [
            {"browser_download_url": "http://bar", "name": "bar"},
            {"browser_download_url": "http://baz", "name": "baz"},
        ]
        response = create_response(headers=MagicMock())
        response.headers.get.return_value = 2
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        stream_context = AsyncMock()
        stream_context.__aenter__.return_value = response
        self.client.stream.return_value = stream_context
        self.client.get.side_effect = [
            get_assets_url_response,
            get_assets_response,
        ]

        assets_it = aiter(
            self.api.download_release_assets(
                "foo/bar", "v1.2.3", match_pattern="*r"
            )
        )

        name, cm = await anext(assets_it)

        self.client.get.assert_has_awaits(
            [
                call("/repos/foo/bar/releases/tags/v1.2.3"),
                call("https://foo.bar/assets"),
            ]
        )

        self.client.stream.assert_called_once_with("http://bar")

        self.assertEqual(name, "bar")

        async with cm as progress_it:
            it = aiter(progress_it)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

        with self.assertRaises(StopAsyncIteration):
            await anext(assets_it)

    async def test_upload_release_assets(self):
        """Files are uploaded to the release's upload_url.

        One file uses the default content type, the other an explicit
        "application/pdf". Upload order is non-deterministic, so the
        assertions dispatch on which file comes back first.
        """
        response = create_response()
        data = RELEASE_JSON.copy()
        data.update({"upload_url": "https://uploads/assets{?name,label}"})
        response.json.return_value = data
        post_response = create_response()
        self.client.get.return_value = response
        self.client.post.return_value = post_response

        file1 = MagicMock(spec=Path)
        file1.name = "foo.txt"
        content1 = b"foo"
        file1.open.return_value.__enter__.return_value.read.side_effect = [
            content1
        ]
        file2 = MagicMock(spec=Path)
        file2.name = "bar.pdf"
        content2 = b"bar"
        file2.open.return_value.__enter__.return_value.read.side_effect = [
            content2
        ]
        upload_files = [file1, (file2, "application/pdf")]

        def assert_file1(index: int):
            # file1 must be posted with the default octet-stream type
            args = self.client.post.await_args_list[index].args
            self.assertEqual(args, ("https://uploads/assets",))
            kwargs = self.client.post.await_args_list[index].kwargs
            self.assertEqual(kwargs["params"], {"name": "foo.txt"})
            self.assertEqual(kwargs["content_type"], "application/octet-stream")

        def assert_file2(index: int):
            # file2 must be posted with its explicit PDF content type
            args = self.client.post.await_args_list[index].args
            self.assertEqual(args, ("https://uploads/assets",))
            kwargs = self.client.post.await_args_list[index].kwargs
            self.assertEqual(kwargs["params"], {"name": "bar.pdf"})
            self.assertEqual(kwargs["content_type"], "application/pdf")

        it = aiter(
            self.api.upload_release_assets("foo/bar", "v1.2.3", upload_files)
        )

        # the order of the files is non-deterministic

        f = await anext(it)
        if f == file1:
            assert_file1(0)
        else:
            assert_file2(0)

        f = await anext(it)
        if f == file1:
            assert_file1(1)
        else:
            assert_file2(1)

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/releases/tags/v1.2.3"
        )
pontos-25.3.2/tests/github/api/test_repositories.py000066400000000000000000001125671476255566300225210ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long

from unittest.mock import MagicMock

import httpx

from pontos.github.api.repositories import (
    GitHubAsyncRESTRepositories,
    GitIgnoreTemplate,
    LicenseType,
    MergeCommitMessage,
    MergeCommitTitle,
    SquashMergeCommitMessage,
    SquashMergeCommitTitle,
)
from tests.github.api import GitHubAsyncRESTTestCase, create_response

REPOSITORY_JSON = {
    "id": 1,
    "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
    "name": "Hello-World",
    "full_name": "octocat/Hello-World",
    "owner": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "private": False,
    "html_url": "https://github.com/octocat/Hello-World",
    "description": "This your first repo!",
    "fork": False,
    "url": "https://api.github.com/repos/octocat/Hello-World",
    "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
    "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
    "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
    "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
    "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
    "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
    "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
    "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
    "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
    "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
    "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
    "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
    "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
    "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
    "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
    "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
    "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
    "git_url": "git:github.com/octocat/Hello-World.git",
    "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
    "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
    "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
    "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
    "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
    "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
    "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
    "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
    "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
    "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
    "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
    "ssh_url": "git@github.com:octocat/Hello-World.git",
    "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
    "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
    "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
    "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
    "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
    "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
    "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
    "clone_url": "https://github.com/octocat/Hello-World.git",
    "mirror_url": "git:git.example.com/octocat/Hello-World",
    "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
    "svn_url": "https://svn.github.com/octocat/Hello-World",
    "homepage": "https://github.com",
    "language": None,
    "forks_count": 9,
    "forks": 9,
    "stargazers_count": 80,
    "watchers_count": 80,
    "watchers": 80,
    "size": 108,
    "default_branch": "master",
    "open_issues_count": 0,
    "open_issues": 0,
    "is_template": False,
    "topics": ["octocat", "atom", "electron", "api"],
    "has_issues": True,
    "has_projects": True,
    "has_wiki": True,
    "has_pages": False,
    "has_downloads": True,
    "has_discussions": False,
    "archived": False,
    "disabled": False,
    "visibility": "public",
    "pushed_at": "2011-01-26T19:06:43Z",
    "created_at": "2011-01-26T19:01:12Z",
    "updated_at": "2011-01-26T19:14:43Z",
    "permissions": {"pull": True, "push": False, "admin": False},
    "allow_rebase_merge": True,
    "template_repository": {
        "id": 1296269,
        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
        "name": "Hello-World-Template",
        "full_name": "octocat/Hello-World-Template",
        "owner": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "private": False,
        "html_url": "https://github.com/octocat/Hello-World-Template",
        "description": "This your first repo!",
        "fork": False,
        "url": "https://api.github.com/repos/octocat/Hello-World-Template",
        "archive_url": "https://api.github.com/repos/octocat/Hello-World-Template/{archive_format}{/ref}",
        "assignees_url": "https://api.github.com/repos/octocat/Hello-World-Template/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/octocat/Hello-World-Template/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/octocat/Hello-World-Template/branches{/branch}",
        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World-Template/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/octocat/Hello-World-Template/comments{/number}",
        "commits_url": "https://api.github.com/repos/octocat/Hello-World-Template/commits{/sha}",
        "compare_url": "https://api.github.com/repos/octocat/Hello-World-Template/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/octocat/Hello-World-Template/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/octocat/Hello-World-Template/contributors",
        "deployments_url": "https://api.github.com/repos/octocat/Hello-World-Template/deployments",
        "downloads_url": "https://api.github.com/repos/octocat/Hello-World-Template/downloads",
        "events_url": "https://api.github.com/repos/octocat/Hello-World-Template/events",
        "forks_url": "https://api.github.com/repos/octocat/Hello-World-Template/forks",
        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World-Template/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World-Template/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World-Template/git/tags{/sha}",
        "git_url": "git:github.com/octocat/Hello-World-Template.git",
        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World-Template/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World-Template/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/octocat/Hello-World-Template/issues{/number}",
        "keys_url": "https://api.github.com/repos/octocat/Hello-World-Template/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/octocat/Hello-World-Template/labels{/name}",
        "languages_url": "https://api.github.com/repos/octocat/Hello-World-Template/languages",
        "merges_url": "https://api.github.com/repos/octocat/Hello-World-Template/merges",
        "milestones_url": "https://api.github.com/repos/octocat/Hello-World-Template/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/octocat/Hello-World-Template/notifications{?since,all,participating}",
        "pulls_url": "https://api.github.com/repos/octocat/Hello-World-Template/pulls{/number}",
        "releases_url": "https://api.github.com/repos/octocat/Hello-World-Template/releases{/id}",
        "ssh_url": "git@github.com:octocat/Hello-World-Template.git",
        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World-Template/stargazers",
        "statuses_url": "https://api.github.com/repos/octocat/Hello-World-Template/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World-Template/subscribers",
        "subscription_url": "https://api.github.com/repos/octocat/Hello-World-Template/subscription",
        "tags_url": "https://api.github.com/repos/octocat/Hello-World-Template/tags",
        "teams_url": "https://api.github.com/repos/octocat/Hello-World-Template/teams",
        "trees_url": "https://api.github.com/repos/octocat/Hello-World-Template/git/trees{/sha}",
        "clone_url": "https://github.com/octocat/Hello-World-Template.git",
        "mirror_url": "git:git.example.com/octocat/Hello-World-Template",
        "hooks_url": "https://api.github.com/repos/octocat/Hello-World-Template/hooks",
        "svn_url": "https://svn.github.com/octocat/Hello-World-Template",
        "homepage": "https://github.com",
        "language": None,
        "forks": 9,
        "forks_count": 9,
        "stargazers_count": 80,
        "watchers_count": 80,
        "watchers": 80,
        "size": 108,
        "default_branch": "master",
        "open_issues": 0,
        "open_issues_count": 0,
        "is_template": True,
        "license": {
            "key": "mit",
            "name": "MIT License",
            "url": "https://api.github.com/licenses/mit",
            "spdx_id": "MIT",
            "node_id": "MDc6TGljZW5zZW1pdA==",
            "html_url": "https://api.github.com/licenses/mit",
        },
        "topics": ["octocat", "atom", "electron", "api"],
        "has_issues": True,
        "has_projects": True,
        "has_wiki": True,
        "has_pages": False,
        "has_downloads": True,
        "archived": False,
        "disabled": False,
        "visibility": "public",
        "pushed_at": "2011-01-26T19:06:43Z",
        "created_at": "2011-01-26T19:01:12Z",
        "updated_at": "2011-01-26T19:14:43Z",
        "permissions": {"admin": False, "push": False, "pull": True},
        "allow_rebase_merge": True,
        "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
        "allow_squash_merge": True,
        "allow_auto_merge": False,
        "delete_branch_on_merge": True,
        "allow_merge_commit": True,
        "subscribers_count": 42,
        "network_count": 0,
    },
    "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
    "allow_squash_merge": True,
    "allow_auto_merge": False,
    "delete_branch_on_merge": True,
    "allow_merge_commit": True,
    "subscribers_count": 42,
    "network_count": 0,
    "license": {
        "key": "mit",
        "name": "MIT License",
        "spdx_id": "MIT",
        "url": "https://api.github.com/licenses/mit",
        "node_id": "MDc6TGljZW5zZW1pdA==",
    },
    "organization": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "Organization",
        "site_admin": False,
    },
    "parent": {
        "id": 1296269,
        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
        "name": "Hello-World",
        "full_name": "octocat/Hello-World",
        "owner": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "private": False,
        "html_url": "https://github.com/octocat/Hello-World",
        "description": "This your first repo!",
        "fork": False,
        "url": "https://api.github.com/repos/octocat/Hello-World",
        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
        "git_url": "git:github.com/octocat/Hello-World.git",
        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
        "ssh_url": "git@github.com:octocat/Hello-World.git",
        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
        "clone_url": "https://github.com/octocat/Hello-World.git",
        "mirror_url": "git:git.example.com/octocat/Hello-World",
        "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
        "svn_url": "https://svn.github.com/octocat/Hello-World",
        "homepage": "https://github.com",
        "language": None,
        "forks_count": 9,
        "stargazers_count": 80,
        "watchers_count": 80,
        "size": 108,
        "default_branch": "master",
        "open_issues_count": 0,
        "is_template": True,
        "topics": ["octocat", "atom", "electron", "api"],
        "has_issues": True,
        "has_projects": True,
        "has_wiki": True,
        "has_pages": False,
        "has_downloads": True,
        "archived": False,
        "disabled": False,
        "visibility": "public",
        "pushed_at": "2011-01-26T19:06:43Z",
        "created_at": "2011-01-26T19:01:12Z",
        "updated_at": "2011-01-26T19:14:43Z",
        "permissions": {"admin": False, "push": False, "pull": True},
        "allow_rebase_merge": True,
        "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
        "allow_squash_merge": True,
        "allow_auto_merge": False,
        "delete_branch_on_merge": True,
        "allow_merge_commit": True,
        "subscribers_count": 42,
        "network_count": 0,
        "license": {
            "key": "mit",
            "name": "MIT License",
            "url": "https://api.github.com/licenses/mit",
            "spdx_id": "MIT",
            "node_id": "MDc6TGljZW5zZW1pdA==",
            "html_url": "https://api.github.com/licenses/mit",
        },
        "forks": 1,
        "open_issues": 1,
        "watchers": 1,
    },
    "source": {
        "id": 1296269,
        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
        "name": "Hello-World",
        "full_name": "octocat/Hello-World",
        "owner": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "private": False,
        "html_url": "https://github.com/octocat/Hello-World",
        "description": "This your first repo!",
        "fork": False,
        "url": "https://api.github.com/repos/octocat/Hello-World",
        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
        "git_url": "git:github.com/octocat/Hello-World.git",
        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
        "ssh_url": "git@github.com:octocat/Hello-World.git",
        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
        "clone_url": "https://github.com/octocat/Hello-World.git",
        "mirror_url": "git:git.example.com/octocat/Hello-World",
        "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
        "svn_url": "https://svn.github.com/octocat/Hello-World",
        "homepage": "https://github.com",
        "language": None,
        "forks_count": 9,
        "stargazers_count": 80,
        "watchers_count": 80,
        "size": 108,
        "default_branch": "master",
        "open_issues_count": 0,
        "is_template": True,
        "topics": ["octocat", "atom", "electron", "api"],
        "has_issues": True,
        "has_projects": True,
        "has_wiki": True,
        "has_pages": False,
        "has_downloads": True,
        "archived": False,
        "disabled": False,
        "visibility": "public",
        "pushed_at": "2011-01-26T19:06:43Z",
        "created_at": "2011-01-26T19:01:12Z",
        "updated_at": "2011-01-26T19:14:43Z",
        "permissions": {"admin": False, "push": False, "pull": True},
        "allow_rebase_merge": True,
        "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
        "allow_squash_merge": True,
        "allow_auto_merge": False,
        "delete_branch_on_merge": True,
        "allow_merge_commit": True,
        "subscribers_count": 42,
        "network_count": 0,
        "license": {
            "key": "mit",
            "name": "MIT License",
            "url": "https://api.github.com/licenses/mit",
            "spdx_id": "MIT",
            "node_id": "MDc6TGljZW5zZW1pdA==",
            "html_url": "https://api.github.com/licenses/mit",
        },
        "forks": 1,
        "open_issues": 1,
        "watchers": 1,
    },
}


class GitHubAsyncRESTRepositoriesTestCase(GitHubAsyncRESTTestCase):
    """Tests for the async GitHub repositories REST API wrapper.

    Each test mocks the underlying HTTP client and verifies both the
    request issued by the API method (endpoint and payload) and, where
    a response body is consumed, the model parsed from the mocked JSON.
    """

    api_cls = GitHubAsyncRESTRepositories

    async def test_get(self):
        """GET /repos/{repo} is awaited once and the response is parsed."""
        response = create_response()
        response.json.return_value = REPOSITORY_JSON
        self.client.get.return_value = response

        repo = await self.api.get("foo/bar")

        self.client.get.assert_awaited_once_with("/repos/foo/bar")

        self.assertEqual(repo.id, 1)

    async def test_get_failure(self):
        """An HTTP error from the client propagates out of get()."""
        response = create_response()
        self.client.get.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.get("foo/bar")

        self.client.get.assert_awaited_once_with("/repos/foo/bar")

    async def test_delete(self):
        """DELETE /repos/{repo} is awaited once."""
        response = create_response()
        self.client.delete.return_value = response

        await self.api.delete("foo/bar")

        self.client.delete.assert_awaited_once_with("/repos/foo/bar")

    async def test_delete_failure(self):
        """An HTTP error from the client propagates out of delete()."""
        response = create_response()
        self.client.delete.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.delete("foo/bar")

        self.client.delete.assert_awaited_once_with("/repos/foo/bar")

    async def test_create_with_defaults(self):
        """create() without keyword args sends the documented defaults."""
        response = create_response()
        response.json.return_value = REPOSITORY_JSON
        self.client.post.return_value = response

        repo = await self.api.create("foo", "bar")

        self.client.post.assert_awaited_once_with(
            "/orgs/foo/repos",
            data={
                "name": "bar",
                "private": False,
                "has_issues": True,
                "has_projects": True,
                "has_wiki": True,
                "is_template": False,
                "has_downloads": True,
                "auto_init": False,
                "allow_squash_merge": True,
                "allow_merge_commit": True,
                "allow_rebase_merge": True,
                "allow_auto_merge": False,
                "allow_update_branch": False,
                "delete_branch_on_merge": False,
            },
        )

        self.assertEqual(repo.id, 1)

    async def test_create(self):
        """create() serializes all keyword args, including enum values."""
        response = create_response()
        response.json.return_value = REPOSITORY_JSON
        self.client.post.return_value = response

        repo = await self.api.create(
            "foo",
            "bar",
            private=True,
            has_issues=False,
            has_projects=False,
            has_wiki=False,
            is_template=True,
            team_id="123",
            has_downloads=False,
            auto_init=True,
            gitignore_template=GitIgnoreTemplate.PYTHON,
            license_template=LicenseType.MIT,
            allow_squash_merge=False,
            allow_merge_commit=False,
            allow_rebase_merge=False,
            allow_auto_merge=True,
            allow_update_branch=True,
            delete_branch_on_merge=True,
            squash_merge_commit_title=SquashMergeCommitTitle.COMMIT_OR_PR_TITLE,
            squash_merge_commit_message=SquashMergeCommitMessage.PR_BODY,
            merge_commit_title=MergeCommitTitle.MERGE_MESSAGE,
            merge_commit_message=MergeCommitMessage.PR_BODY,
        )

        # Enums must be sent as their GitHub API string values, not as
        # enum instances.
        self.client.post.assert_awaited_once_with(
            "/orgs/foo/repos",
            data={
                "name": "bar",
                "private": True,
                "has_issues": False,
                "has_projects": False,
                "has_wiki": False,
                "is_template": True,
                "team_id": "123",
                "has_downloads": False,
                "auto_init": True,
                "license_template": "mit",
                "gitignore_template": "Python",
                "allow_squash_merge": False,
                "allow_merge_commit": False,
                "allow_rebase_merge": False,
                "allow_auto_merge": True,
                "allow_update_branch": True,
                "delete_branch_on_merge": True,
                "squash_merge_commit_title": "COMMIT_OR_PR_TITLE",
                "squash_merge_commit_message": "PR_BODY",
                "merge_commit_title": "MERGE_MESSAGE",
                "merge_commit_message": "PR_BODY",
            },
        )

        self.assertEqual(repo.id, 1)

    async def test_archive(self):
        """archive() sends a POST with the archived flag set."""
        response = create_response()
        self.client.post.return_value = response

        await self.api.archive("foo/bar")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar", data={"archived": True}
        )

    async def test_archive_failure(self):
        """An HTTP error from the client propagates out of archive()."""
        response = create_response()
        self.client.post.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.archive("foo/bar")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar", data={"archived": True}
        )

    async def test_update(self):
        """update() serializes all keyword args, including enum values."""
        response = create_response()
        response.json.return_value = REPOSITORY_JSON
        self.client.post.return_value = response

        repo = await self.api.update(
            "foo/bar",
            name="baz",
            description="A new Baz",
            homepage="http://baz.com",
            private=True,
            has_issues=False,
            has_projects=False,
            has_wiki=False,
            is_template=True,
            allow_squash_merge=False,
            allow_merge_commit=False,
            allow_rebase_merge=False,
            allow_auto_merge=True,
            allow_update_branch=True,
            delete_branch_on_merge=True,
            squash_merge_commit_title=SquashMergeCommitTitle.PR_TITLE,
            squash_merge_commit_message=SquashMergeCommitMessage.PR_BODY,
            merge_commit_title=MergeCommitTitle.PR_TITLE,
            merge_commit_message=MergeCommitMessage.PR_BODY,
            allow_forking=True,
            web_commit_signoff_required=True,
        )

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar",
            data={
                "name": "baz",
                "description": "A new Baz",
                "homepage": "http://baz.com",
                "private": True,
                "has_issues": False,
                "has_projects": False,
                "has_wiki": False,
                "is_template": True,
                "allow_squash_merge": False,
                "allow_merge_commit": False,
                "allow_rebase_merge": False,
                "allow_auto_merge": True,
                "allow_update_branch": True,
                "delete_branch_on_merge": True,
                "squash_merge_commit_title": "PR_TITLE",
                "squash_merge_commit_message": "PR_BODY",
                "merge_commit_title": "PR_TITLE",
                "merge_commit_message": "PR_BODY",
                "allow_forking": True,
                "web_commit_signoff_required": True,
            },
        )

        self.assertEqual(repo.id, 1)

    async def test_update_with_defaults(self):
        """update() without keyword args sends the documented defaults."""
        response = create_response()
        response.json.return_value = REPOSITORY_JSON
        self.client.post.return_value = response

        repo = await self.api.update("foo/bar")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar",
            data={
                "private": False,
                "has_issues": True,
                "has_projects": True,
                "has_wiki": True,
                "is_template": False,
                "allow_squash_merge": True,
                "allow_merge_commit": True,
                "allow_rebase_merge": True,
                "allow_auto_merge": False,
                "allow_update_branch": False,
                "delete_branch_on_merge": False,
                "allow_forking": False,
                "web_commit_signoff_required": False,
            },
        )

        self.assertEqual(repo.id, 1)

    async def test_update_failure(self):
        """An HTTP error from the client propagates out of update()."""
        response = create_response()
        self.client.post.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.update("foo/bar")

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar",
            data={
                "private": False,
                "has_issues": True,
                "has_projects": True,
                "has_wiki": True,
                "is_template": False,
                "allow_squash_merge": True,
                "allow_merge_commit": True,
                "allow_rebase_merge": True,
                "allow_auto_merge": False,
                "allow_update_branch": False,
                "delete_branch_on_merge": False,
                "allow_forking": False,
                "web_commit_signoff_required": False,
            },
        )

    async def test_get_topics(self):
        """topics() returns the list from the 'names' key of the response."""
        response = create_response()
        response.json.return_value = {"names": ["foo", "bar", "baz"]}
        self.client.get.return_value = response

        topics = await self.api.topics("foo/bar")

        self.client.get.assert_awaited_once_with("/repos/foo/bar/topics")

        self.assertEqual(len(topics), 3)
        self.assertEqual(topics, ["foo", "bar", "baz"])

    async def test_update_topics(self):
        """update_topics() PUTs the new names and returns the server's list."""
        response = create_response()
        # Fix: the previous version first assigned an empty dict to
        # response.json.return_value and immediately overwrote it — the
        # dead assignment has been removed.
        response.json.return_value = {"names": ["foo", "bar"]}

        self.client.put.return_value = response

        new_topics = await self.api.update_topics(
            "foo/bar",
            (
                "foo",
                "bar",
            ),
        )

        # The tuple argument must be serialized as a JSON list.
        self.client.put.assert_awaited_once_with(
            "/repos/foo/bar/topics", data={"names": ["foo", "bar"]}
        )

        self.assertEqual(len(new_topics), 2)
        self.assertEqual(new_topics, ["foo", "bar"])
pontos-25.3.2/tests/github/api/test_search.py000066400000000000000000000564601476255566300212360ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=redefined-builtin, line-too-long

from pontos.github.api.search import GitHubAsyncRESTSearch
from pontos.github.models.search import (
    InNameQualifier,
    IsPublicQualifier,
    OrganizationQualifier,
    RepositorySort,
    SortOrder,
)
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response


class GitHubAsyncRESTSeachTestCase(GitHubAsyncRESTTestCase):
    # NOTE(review): class name has a typo ("Seach"); kept unchanged because it
    # is the block's public identifier (test discovery references it by name).
    api_cls = GitHubAsyncRESTSearch

    async def test_search_repositories(self):
        """Searching repositories pages through results and builds the query
        string from keywords, qualifiers, sort and order.
        """

        def _repository(repo_id: int) -> dict:
            # The three fixture payloads used by this test are identical
            # except for their "id" — build them from one template instead of
            # repeating ~100 lines of JSON per item.
            return {
                "id": repo_id,
                "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                "name": "Hello-World",
                "full_name": "octocat/Hello-World",
                "owner": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "private": False,
                "html_url": "https://github.com/octocat/Hello-World",
                "description": "This your first repo!",
                "fork": False,
                "url": "https://api.github.com/repos/octocat/Hello-World",
                "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                "git_url": "git:github.com/octocat/Hello-World.git",
                "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                "ssh_url": "git@github.com:octocat/Hello-World.git",
                "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                "clone_url": "https://github.com/octocat/Hello-World.git",
                "mirror_url": "git:git.example.com/octocat/Hello-World",
                "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
                "svn_url": "https://svn.github.com/octocat/Hello-World",
                "homepage": "https://github.com",
                "language": None,
                "forks_count": 9,
                "stargazers_count": 80,
                "watchers_count": 80,
                "size": 108,
                "default_branch": "master",
                "open_issues_count": 0,
                "is_template": False,
                "topics": ["octocat", "atom", "electron", "api"],
                "has_issues": True,
                "has_projects": True,
                "has_wiki": True,
                "has_pages": False,
                "has_downloads": True,
                "has_discussions": False,
                "archived": False,
                "disabled": False,
                "visibility": "public",
                "pushed_at": "2011-01-26T19:06:43Z",
                "created_at": "2011-01-26T19:01:12Z",
                "updated_at": "2011-01-26T19:14:43Z",
                "permissions": {
                    "admin": False,
                    "push": False,
                    "pull": True,
                },
                "forks": 1,
                "open_issues": 0,
                "watchers": 1,
            }

        # Two pages of search results: page one has a single item, page two
        # has two — exercising pagination across responses.
        response1 = create_response()
        response1.json.return_value = {"items": [_repository(1)]}
        response2 = create_response()
        response2.json.return_value = {
            "items": [_repository(2), _repository(3)]
        }

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(
            self.api.repositories(
                keywords=["hello"],
                qualifiers=[
                    InNameQualifier(),
                    IsPublicQualifier(),
                    OrganizationQualifier("octocat"),
                ],
                sort=RepositorySort.UPDATED,
                order=SortOrder.DESC,
            )
        )
        repo = await anext(async_it)
        self.assertEqual(repo.id, 1)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 2)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 3)

        # The iterator is exhausted after the third repository.
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        # Keywords and qualifiers must be joined into a single "q" parameter.
        self.client.get_all.assert_called_once_with(
            "/search/repositories",
            params={
                "per_page": "100",
                "order": "desc",
                "sort": "updated",
                "q": "hello in:name is:public org:octocat",
            },
        )
pontos-25.3.2/tests/github/api/test_secret_scanning.py000066400000000000000000001100201476255566300231150ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

from pontos.github.api.secret_scanning import GitHubAsyncRESTSecretScanning
from pontos.github.models.base import SortOrder
from pontos.github.models.secret_scanning import (
    AlertSort,
    AlertState,
    LocationType,
    Resolution,
)
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

ALERTS = [
    {
        "number": 2,
        "created_at": "2020-11-06T18:48:51Z",
        "url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/2",
        "html_url": "https://github.com/owner/private-repo/security/secret-scanning/2",
        "locations_url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/2/locations",
        "state": "resolved",
        "resolution": "false_positive",
        "resolved_at": "2020-11-07T02:47:13Z",
        "resolved_by": {
            "login": "monalisa",
            "id": 2,
            "node_id": "MDQ6VXNlcjI=",
            "avatar_url": "https://alambic.github.com/avatars/u/2?",
            "gravatar_id": "",
            "url": "https://api.github.com/users/monalisa",
            "html_url": "https://github.com/monalisa",
            "followers_url": "https://api.github.com/users/monalisa/followers",
            "following_url": "https://api.github.com/users/monalisa/following{/other_user}",
            "gists_url": "https://api.github.com/users/monalisa/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/monalisa/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/monalisa/subscriptions",
            "organizations_url": "https://api.github.com/users/monalisa/orgs",
            "repos_url": "https://api.github.com/users/monalisa/repos",
            "events_url": "https://api.github.com/users/monalisa/events{/privacy}",
            "received_events_url": "https://api.github.com/users/monalisa/received_events",
            "type": "User",
            "site_admin": True,
        },
        "secret_type": "adafruit_io_key",
        "secret_type_display_name": "Adafruit IO Key",
        "secret": "aio_XXXXXXXXXXXXXXXXXXXXXXXXXXXX",
        "repository": {
            "id": 1296269,
            "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
            "name": "Hello-World",
            "full_name": "octocat/Hello-World",
            "owner": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "private": False,
            "html_url": "https://github.com/octocat/Hello-World",
            "description": "This your first repo!",
            "fork": False,
            "url": "https://api.github.com/repos/octocat/Hello-World",
            "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
            "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
            "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
            "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
            "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
            "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
            "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
            "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
            "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
            "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
            "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
            "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
            "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
            "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
            "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
            "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
            "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
            "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
            "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
        },
        "push_protection_bypassed_by": {
            "login": "monalisa",
            "id": 2,
            "node_id": "MDQ6VXNlcjI=",
            "avatar_url": "https://alambic.github.com/avatars/u/2?",
            "gravatar_id": "",
            "url": "https://api.github.com/users/monalisa",
            "html_url": "https://github.com/monalisa",
            "followers_url": "https://api.github.com/users/monalisa/followers",
            "following_url": "https://api.github.com/users/monalisa/following{/other_user}",
            "gists_url": "https://api.github.com/users/monalisa/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/monalisa/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/monalisa/subscriptions",
            "organizations_url": "https://api.github.com/users/monalisa/orgs",
            "repos_url": "https://api.github.com/users/monalisa/repos",
            "events_url": "https://api.github.com/users/monalisa/events{/privacy}",
            "received_events_url": "https://api.github.com/users/monalisa/received_events",
            "type": "User",
            "site_admin": True,
        },
        "push_protection_bypassed": True,
        "push_protection_bypassed_at": "2020-11-06T21:48:51Z",
        "resolution_comment": "Example comment",
    },
    {
        "number": 1,
        "created_at": "2020-11-06T18:18:30Z",
        "url": "https://api.github.com/repos/owner/repo/secret-scanning/alerts/1",
        "html_url": "https://github.com/owner/repo/security/secret-scanning/1",
        "locations_url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/1/locations",
        "state": "open",
        "resolution": None,
        "resolved_at": None,
        "resolved_by": None,
        "secret_type": "mailchimp_api_key",
        "secret_type_display_name": "Mailchimp API Key",
        "secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-us2",
        "repository": {
            "id": 1296269,
            "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
            "name": "Hello-World",
            "full_name": "octocat/Hello-World",
            "owner": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "private": False,
            "html_url": "https://github.com/octocat/Hello-World",
            "description": "This your first repo!",
            "fork": False,
            "url": "https://api.github.com/repos/octocat/Hello-World",
            "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
            "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
            "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
            "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
            "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
            "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
            "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
            "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
            "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
            "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
            "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
            "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
            "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
            "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
            "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
            "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
            "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
            "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
            "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
            "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
            "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
            "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
            "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
            "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
            "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
            "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
            "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
            "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
            "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
            "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
            "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
            "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
            "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
            "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
            "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
        },
        "push_protection_bypassed_by": None,
        "push_protection_bypassed": False,
        "push_protection_bypassed_at": None,
        "resolution_comment": None,
    },
]


class GitHubAsyncRESTSecretScanningTestCase(GitHubAsyncRESTTestCase):
    """Unit tests for the async secret scanning REST API.

    All listing tests follow the same pattern: mock a single paginated
    response containing the ``ALERTS`` fixture, iterate the API call
    under test and verify both the yielded alerts and the exact request
    that was issued. The shared steps live in the private helpers below.
    """

    api_cls = GitHubAsyncRESTSecretScanning

    def _mock_alerts_response(self) -> None:
        """Make client.get_all yield one page whose JSON body is ALERTS."""
        response = create_response()
        response.json.return_value = ALERTS
        self.client.get_all.return_value = AsyncIteratorMock([response])

    async def _assert_alerts_yielded(self, async_it) -> None:
        """Assert the iterator yields alerts 2 and 1 and then is exhausted."""
        # the ALERTS fixture contains alert number 2 followed by number 1
        alert = await anext(async_it)
        self.assertEqual(alert.number, 2)

        alert = await anext(async_it)
        self.assertEqual(alert.number, 1)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

    def _assert_get_all_called_with(
        self, url: str, **extra_params: str
    ) -> None:
        """Assert a single listing request with default plus extra params.

        Keyword arguments are merged over the default query parameters,
        so ``sort="updated"`` or ``direction="asc"`` override the
        defaults while e.g. ``state="resolved"`` is simply added.
        """
        self.client.get_all.assert_called_once_with(
            url,
            params={
                "per_page": "100",
                "sort": "created",
                "direction": "desc",
                **extra_params,
            },
        )

    async def test_enterprise_alerts(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.enterprise_alerts("foo"))
        )

        self._assert_get_all_called_with(
            "/enterprises/foo/secret-scanning/alerts"
        )

    async def test_enterprise_alerts_state(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.enterprise_alerts("foo", state=AlertState.RESOLVED))
        )

        self._assert_get_all_called_with(
            "/enterprises/foo/secret-scanning/alerts", state="resolved"
        )

    async def test_enterprise_alerts_secret_types(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(
                self.api.enterprise_alerts(
                    "foo",
                    secret_types=[
                        "google_api_key",
                        "hashicorp_vault_service_token",
                    ],
                )
            )
        )

        # the list of secret types must be joined into a single csv param
        self._assert_get_all_called_with(
            "/enterprises/foo/secret-scanning/alerts",
            secret_type="google_api_key,hashicorp_vault_service_token",
        )

    async def test_enterprise_alerts_resolutions(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(
                self.api.enterprise_alerts(
                    "foo",
                    resolutions=["false_positive", "wont_fix", "revoked"],
                )
            )
        )

        # the list of resolutions must be joined into a single csv param
        self._assert_get_all_called_with(
            "/enterprises/foo/secret-scanning/alerts",
            resolution="false_positive,wont_fix,revoked",
        )

    async def test_enterprise_alerts_sort(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.enterprise_alerts("foo", sort=AlertSort.UPDATED))
        )

        self._assert_get_all_called_with(
            "/enterprises/foo/secret-scanning/alerts", sort="updated"
        )

    async def test_enterprise_alerts_direction(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.enterprise_alerts("foo", direction=SortOrder.ASC))
        )

        self._assert_get_all_called_with(
            "/enterprises/foo/secret-scanning/alerts", direction="asc"
        )

    async def test_organization_alerts(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.organization_alerts("foo"))
        )

        self._assert_get_all_called_with("/orgs/foo/secret-scanning/alerts")

    async def test_organization_alerts_state(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(
                self.api.organization_alerts("foo", state=AlertState.RESOLVED)
            )
        )

        self._assert_get_all_called_with(
            "/orgs/foo/secret-scanning/alerts", state="resolved"
        )

    async def test_organization_alerts_secret_types(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(
                self.api.organization_alerts(
                    "foo",
                    secret_types=[
                        "google_api_key",
                        "hashicorp_vault_service_token",
                    ],
                )
            )
        )

        self._assert_get_all_called_with(
            "/orgs/foo/secret-scanning/alerts",
            secret_type="google_api_key,hashicorp_vault_service_token",
        )

    async def test_organization_alerts_resolutions(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(
                self.api.organization_alerts(
                    "foo",
                    resolutions=["false_positive", "wont_fix", "revoked"],
                )
            )
        )

        self._assert_get_all_called_with(
            "/orgs/foo/secret-scanning/alerts",
            resolution="false_positive,wont_fix,revoked",
        )

    async def test_organization_alerts_sort(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.organization_alerts("foo", sort=AlertSort.UPDATED))
        )

        self._assert_get_all_called_with(
            "/orgs/foo/secret-scanning/alerts", sort="updated"
        )

    async def test_organization_alerts_direction(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.organization_alerts("foo", direction=SortOrder.ASC))
        )

        self._assert_get_all_called_with(
            "/orgs/foo/secret-scanning/alerts", direction="asc"
        )

    async def test_alerts(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(aiter(self.api.alerts("foo/bar")))

        self._assert_get_all_called_with(
            "/repos/foo/bar/secret-scanning/alerts"
        )

    async def test_alerts_state(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.alerts("foo/bar", state=AlertState.RESOLVED))
        )

        self._assert_get_all_called_with(
            "/repos/foo/bar/secret-scanning/alerts", state="resolved"
        )

    async def test_alerts_secret_types(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(
                self.api.alerts(
                    "foo/bar",
                    secret_types=[
                        "google_api_key",
                        "hashicorp_vault_service_token",
                    ],
                )
            )
        )

        self._assert_get_all_called_with(
            "/repos/foo/bar/secret-scanning/alerts",
            secret_type="google_api_key,hashicorp_vault_service_token",
        )

    async def test_alerts_resolutions(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(
                self.api.alerts(
                    "foo/bar",
                    resolutions=["false_positive", "wont_fix", "revoked"],
                )
            )
        )

        self._assert_get_all_called_with(
            "/repos/foo/bar/secret-scanning/alerts",
            resolution="false_positive,wont_fix,revoked",
        )

    async def test_alerts_sort(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.alerts("foo/bar", sort=AlertSort.UPDATED))
        )

        self._assert_get_all_called_with(
            "/repos/foo/bar/secret-scanning/alerts", sort="updated"
        )

    async def test_alerts_direction(self):
        self._mock_alerts_response()

        await self._assert_alerts_yielded(
            aiter(self.api.alerts("foo/bar", direction=SortOrder.ASC))
        )

        self._assert_get_all_called_with(
            "/repos/foo/bar/secret-scanning/alerts", direction="asc"
        )

    async def test_alert(self):
        """Requesting a single alert by number issues one GET request."""
        response = create_response()
        response.json.return_value = {
            "number": 42,
            "created_at": "2020-11-06T18:18:30Z",
            "url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/42",
            "html_url": "https://github.com/owner/private-repo/security/secret-scanning/42",
            "locations_url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/42/locations",
            "state": "open",
            "resolution": None,
            "resolved_at": None,
            "resolved_by": None,
            "secret_type": "mailchimp_api_key",
            "secret_type_display_name": "Mailchimp API Key",
            "secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-us2",
            "push_protection_bypassed_by": None,
            "push_protection_bypassed": False,
            "push_protection_bypassed_at": None,
            "resolution_comment": None,
        }
        self.client.get.return_value = response

        alert = await self.api.alert(
            "foo/bar",
            42,
        )

        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/secret-scanning/alerts/42",
        )

        self.assertEqual(alert.number, 42)

    async def test_update(self):
        """Updating an alert issues a PATCH with state/resolution data."""
        response = create_response()
        response.json.return_value = {
            "number": 42,
            "created_at": "2020-11-06T18:18:30Z",
            "url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/42",
            "html_url": "https://github.com/owner/private-repo/security/secret-scanning/42",
            "locations_url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/42/locations",
            "state": "resolved",
            "resolution": "used_in_tests",
            "resolved_at": "2020-11-16T22:42:07Z",
            "resolved_by": {
                "login": "monalisa",
                "id": 2,
                "node_id": "MDQ6VXNlcjI=",
                "avatar_url": "https://alambic.github.com/avatars/u/2?",
                "gravatar_id": "",
                "url": "https://api.github.com/users/monalisa",
                "html_url": "https://github.com/monalisa",
                "followers_url": "https://api.github.com/users/monalisa/followers",
                "following_url": "https://api.github.com/users/monalisa/following{/other_user}",
                "gists_url": "https://api.github.com/users/monalisa/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/monalisa/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/monalisa/subscriptions",
                "organizations_url": "https://api.github.com/users/monalisa/orgs",
                "repos_url": "https://api.github.com/users/monalisa/repos",
                "events_url": "https://api.github.com/users/monalisa/events{/privacy}",
                "received_events_url": "https://api.github.com/users/monalisa/received_events",
                "type": "User",
                "site_admin": True,
            },
            "secret_type": "mailchimp_api_key",
            "secret_type_display_name": "Mailchimp API Key",
            "secret": "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX-us2",
            "push_protection_bypassed": False,
            "push_protection_bypassed_by": None,
            "push_protection_bypassed_at": None,
            "resolution_comment": "Example comment",
        }
        self.client.patch.return_value = response

        alert = await self.api.update_alert(
            "foo/bar",
            1,
            AlertState.RESOLVED,
            resolution=Resolution.USED_IN_TESTS,
            resolution_comment="Only used in tests",
        )

        self.client.patch.assert_awaited_once_with(
            "/repos/foo/bar/secret-scanning/alerts/1",
            data={
                "state": "resolved",
                "resolution": "used_in_tests",
                "resolution_comment": "Only used in tests",
            },
        )

        self.assertEqual(alert.number, 42)
        # the single-alert response contains no "repository" key
        self.assertIsNone(alert.repository)

    async def test_alerts_locations(self):
        """Location listing yields one typed location per response entry."""
        response = create_response()
        response.json.return_value = [
            {
                "type": "commit",
                "details": {
                    "path": "/example/secrets.txt",
                    "start_line": 1,
                    "end_line": 1,
                    "start_column": 1,
                    "end_column": 64,
                    "blob_sha": "af5626b4a114abcb82d63db7c8082c3c4756e51b",
                    "blob_url": "https://api.github.com/repos/octocat/hello-world/git/blobs/af5626b4a114abcb82d63db7c8082c3c4756e51b",
                    "commit_sha": "f14d7debf9775f957cf4f1e8176da0786431f72b",
                    "commit_url": "https://api.github.com/repos/octocat/hello-world/git/commits/f14d7debf9775f957cf4f1e8176da0786431f72b",
                },
            },
            {
                "type": "issue_title",
                "details": {
                    "issue_title_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347"
                },
            },
            {
                "type": "issue_body",
                "details": {
                    "issue_body_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347"
                },
            },
            {
                "type": "issue_comment",
                "details": {
                    "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments/1081119451"
                },
            },
        ]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.locations("foo/bar", 123))
        # expected types in the same order as the mocked response entries
        for expected_type in (
            LocationType.COMMIT,
            LocationType.ISSUE_TITLE,
            LocationType.ISSUE_BODY,
            LocationType.ISSUE_COMMENT,
        ):
            location = await anext(async_it)
            self.assertEqual(location.type, expected_type)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/secret-scanning/alerts/123/locations",
            params={
                "per_page": "100",
            },
        )
pontos-25.3.2/tests/github/api/test_tags.py000066400000000000000000000212471476255566300207220ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=too-many-lines, redefined-builtin, line-too-long
# ruff: noqa: E501

from datetime import datetime, timezone
from unittest.mock import MagicMock

import httpx

from pontos.github.api.tags import GitHubAsyncRESTTags
from pontos.github.models.tag import GitObjectType, Tag, VerificationReason
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

# Response fixture for a single annotated tag object as returned by
# GitHub's git database "get a tag" endpoint, including tagger info,
# the tagged commit object and (unsigned) verification details.
TAG_JSON = {
    "node_id": "MDM6VGFnOTQwYmQzMzYyNDhlZmFlMGY5ZWU1YmM3YjJkNWM5ODU4ODdiMTZhYw==",
    "tag": "v0.0.1",
    "sha": "940bd336248efae0f9ee5bc7b2d5c985887b16ac",
    "url": "https://api.github.com/repos/octocat/Hello-World/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac",
    "message": "initial version",
    "tagger": {
        "name": "Monalisa Octocat",
        "email": "octocat@github.com",
        "date": "2014-11-07T22:01:45Z",
    },
    "object": {
        "type": "commit",
        "sha": "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
        "url": "https://api.github.com/repos/octocat/Hello-World/git/commits/c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
    },
    "verification": {
        "verified": False,
        "reason": "unsigned",
        "signature": None,
        "payload": None,
    },
}


# Response fixture for the "list repository tags" endpoint: two
# lightweight tag entries, each pointing at a commit.
TAGS_JSON = [
    {
        "name": "v0.1.0",
        "zipball_url": "https://api.github.com/repos/foo/ui/zipball/refs/tags/v0.1.0",
        "tarball_url": "https://api.github.com/repos/foo/ui/tarball/refs/tags/v0.1.0",
        "commit": {
            "sha": "b55408280162c20cf0ffef155618270d26fe7352",
            "url": "https://api.github.com/repos/foo/ui/commits/b55408280162c20cf0ffef155618270d26fe7352",
        },
        "node_id": "REF_kwDOHeuT1rByZWZzL3RhZ3MvdjAuMS4w",
    },
    {
        "name": "0.0.72",
        "zipball_url": "https://api.github.com/repos/foo/ui/zipball/refs/tags/0.0.72",
        "tarball_url": "https://api.github.com/repos/foo/ui/tarball/refs/tags/0.0.72",
        "commit": {
            "sha": "fd54097278df75332917d9d1a2d940083513a6b2",
            "url": "https://api.github.com/repos/foo/ui/commits/fd54097278df75332917d9d1a2d940083513a6b2",
        },
        "node_id": "REF_kwDOHeuT1rByZWZzL3RhZ3MvMC4wLjcy",
    },
]


class GitHubAsyncRESTTagsTestCase(GitHubAsyncRESTTestCase):
    """Tests for the GitHubAsyncRESTTags API wrapper.

    All HTTP traffic is mocked via ``self.client``; the tests verify both
    the request that the API issues and the parsing of the response JSON
    into model objects.
    """

    api_cls = GitHubAsyncRESTTags

    def assertTag(self, tag: Tag):  # pylint: disable=invalid-name
        """Assert that ``tag`` contains exactly the data from TAG_JSON."""
        self.assertEqual(
            tag.node_id,
            "MDM6VGFnOTQwYmQzMzYyNDhlZmFlMGY5ZWU1YmM3YjJkNWM5ODU4ODdiMTZhYw==",
        )
        self.assertEqual(tag.tag, "v0.0.1")
        self.assertEqual(tag.sha, "940bd336248efae0f9ee5bc7b2d5c985887b16ac")
        self.assertEqual(
            tag.url,
            "https://api.github.com/repos/octocat/Hello-World/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac",
        )
        self.assertEqual(tag.message, "initial version")

        # the ISO timestamp in the payload must be parsed timezone-aware
        tagger = tag.tagger
        self.assertEqual(tagger.name, "Monalisa Octocat")
        self.assertEqual(tagger.email, "octocat@github.com")
        self.assertEqual(
            tagger.date, datetime(2014, 11, 7, 22, 1, 45, tzinfo=timezone.utc)
        )

        tag_object = tag.object
        self.assertEqual(tag_object.type, GitObjectType.COMMIT)
        self.assertEqual(
            tag_object.sha, "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c"
        )
        self.assertEqual(
            tag_object.url,
            "https://api.github.com/repos/octocat/Hello-World/git/commits/c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
        )

        verification = tag.verification
        self.assertFalse(verification.verified)
        self.assertEqual(verification.reason, VerificationReason.UNSIGNED)
        self.assertIsNone(verification.payload)
        self.assertIsNone(verification.signature)

    async def test_create(self):
        """Creating a tag POSTs the tag data and returns the parsed tag."""
        response = create_response()
        response.json.return_value = TAG_JSON
        self.client.post.return_value = response

        tag = await self.api.create(
            "octocat/Hello-World",
            "v0.0.1",
            "initial version",
            "Monalisa Octocat",
            "octocat@github.com",
            "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
        )

        self.client.post.assert_awaited_once_with(
            "/repos/octocat/Hello-World/git/tags",
            data={
                "tag": "v0.0.1",
                "message": "initial version",
                "object": "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
                "type": "commit",
                "tagger": {
                    "name": "Monalisa Octocat",
                    "email": "octocat@github.com",
                },
            },
        )

        self.assertTag(tag)

    async def test_create_failure(self):
        """HTTP errors while creating a tag are propagated to the caller."""
        response = create_response()
        self.client.post.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.create(
                "octocat/Hello-World",
                "v0.0.1",
                "initial version",
                "Monalisa Octocat",
                "octocat@github.com",
                "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
            )

        # the request must have been issued even though it failed
        self.client.post.assert_awaited_once_with(
            "/repos/octocat/Hello-World/git/tags",
            data={
                "tag": "v0.0.1",
                "message": "initial version",
                "object": "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
                "type": "commit",
                "tagger": {
                    "name": "Monalisa Octocat",
                    "email": "octocat@github.com",
                },
            },
        )

    async def test_create_tag_reference(self):
        """Creating a tag reference POSTs a fully qualified refs/tags ref."""
        response = create_response()
        self.client.post.return_value = response

        await self.api.create_tag_reference(
            "foo/bar", "v1.0.0", "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c"
        )

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/git/refs",
            data={
                "ref": "refs/tags/v1.0.0",
                "sha": "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
            },
        )

    async def test_create_tag_reference_failure(self):
        """HTTP errors while creating a tag reference are propagated."""
        response = create_response()
        self.client.post.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.create_tag_reference(
                "foo/bar", "v1.0.0", "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c"
            )

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/git/refs",
            data={
                "ref": "refs/tags/v1.0.0",
                "sha": "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
            },
        )

    async def test_get(self):
        """Fetching a single tag GETs its URL and parses the response."""
        response = create_response()
        response.json.return_value = TAG_JSON
        self.client.get.return_value = response

        tag = await self.api.get("octocat/Hello-World", "v0.0.1")

        self.client.get.assert_awaited_once_with(
            "/repos/octocat/Hello-World/git/tags/v0.0.1"
        )

        self.assertTag(tag)

    async def test_get_failure(self):
        """HTTP errors while fetching a tag are propagated to the caller."""
        response = create_response()
        self.client.get.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.get("octocat/Hello-World", "v0.0.1")

        self.client.get.assert_awaited_once_with(
            "/repos/octocat/Hello-World/git/tags/v0.0.1"
        )

    async def test_get_all(self):
        """Iterating get_all yields every tag of the paginated response."""
        response = create_response()
        response.json.return_value = TAGS_JSON
        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.get_all("foo"))

        tag = await anext(async_it)
        self.assertEqual(tag.name, "v0.1.0")
        self.assertEqual(
            tag.commit.sha, "b55408280162c20cf0ffef155618270d26fe7352"
        )
        self.assertEqual(
            tag.commit.url,
            "https://api.github.com/repos/foo/ui/commits/b55408280162c20cf0ffef155618270d26fe7352",
        )
        self.assertEqual(
            tag.zipball_url,
            "https://api.github.com/repos/foo/ui/zipball/refs/tags/v0.1.0",
        )
        self.assertEqual(
            tag.tarball_url,
            "https://api.github.com/repos/foo/ui/tarball/refs/tags/v0.1.0",
        )
        self.assertEqual(tag.node_id, "REF_kwDOHeuT1rByZWZzL3RhZ3MvdjAuMS4w")

        tag2 = await anext(async_it)
        self.assertEqual(tag2.name, "0.0.72")
        self.assertEqual(
            tag2.commit.sha, "fd54097278df75332917d9d1a2d940083513a6b2"
        )
        self.assertEqual(tag2.node_id, "REF_kwDOHeuT1rByZWZzL3RhZ3MvMC4wLjcy")

        # TAGS_JSON contains exactly two tags, so the iterator must be
        # exhausted now
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)
pontos-25.3.2/tests/github/api/test_teams.py000066400000000000000000001130321476255566300210670ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=redefined-builtin, line-too-long

from unittest.mock import MagicMock

import httpx

from pontos.github.api.teams import GitHubAsyncRESTTeams, TeamPrivacy, TeamRole
from pontos.github.models.base import Permission
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response


class GitHubAsyncRESTTeamsTestCase(GitHubAsyncRESTTestCase):
    api_cls = GitHubAsyncRESTTeams

    async def test_get_all(self):
        """Iterating get_all yields all teams across paginated responses."""
        # first page: a single team
        response1 = create_response()
        response1.json.return_value = [
            {
                "id": 1,
                "node_id": "MDQ6VGVhbTE=",
                "url": "https://api.github.com/teams/1",
                "html_url": "https://github.com/orgs/github/teams/justice-league",
                "name": "Justice League",
                "slug": "justice-league",
                "description": "A great team.",
                "privacy": "closed",
                "permission": "admin",
                "members_url": "https://api.github.com/teams/1/members{/member}",
                "repositories_url": "https://api.github.com/teams/1/repos",
                "parent": None,
            }
        ]
        # second page: two more teams (only the ids differ)
        response2 = create_response()
        response2.json.return_value = [
            {
                "id": 2,
                "node_id": "MDQ6VGVhbTE=",
                "url": "https://api.github.com/teams/1",
                "html_url": "https://github.com/orgs/github/teams/justice-league",
                "name": "Justice League",
                "slug": "justice-league",
                "description": "A great team.",
                "privacy": "closed",
                "permission": "admin",
                "members_url": "https://api.github.com/teams/1/members{/member}",
                "repositories_url": "https://api.github.com/teams/1/repos",
                "parent": None,
            },
            {
                "id": 3,
                "node_id": "MDQ6VGVhbTE=",
                "url": "https://api.github.com/teams/1",
                "html_url": "https://github.com/orgs/github/teams/justice-league",
                "name": "Justice League",
                "slug": "justice-league",
                "description": "A great team.",
                "privacy": "closed",
                "permission": "admin",
                "members_url": "https://api.github.com/teams/1/members{/member}",
                "repositories_url": "https://api.github.com/teams/1/repos",
                "parent": None,
            },
        ]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.get_all("foo"))
        team = await anext(async_it)
        self.assertEqual(team.id, 1)
        team = await anext(async_it)
        self.assertEqual(team.id, 2)
        team = await anext(async_it)
        self.assertEqual(team.id, 3)

        # three teams total across both pages -> iterator must be exhausted
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/teams",
            params={"per_page": "100"},
        )

    async def test_create(self):
        """Creating a team POSTs all settings and returns the parsed team."""
        response = create_response()
        response.json.return_value = {
            "id": 1,
            "node_id": "MDQ6VGVhbTE=",
            "url": "https://api.github.com/teams/1",
            "html_url": "https://github.com/orgs/github/teams/justice-league",
            "name": "Justice League",
            "slug": "justice-league",
            "description": "A great team.",
            "privacy": "closed",
            "permission": "admin",
            "members_url": "https://api.github.com/teams/1/members{/member}",
            "repositories_url": "https://api.github.com/teams/1/repos",
            "parent": None,
        }
        self.client.post.return_value = response

        team = await self.api.create(
            "foo",
            "bar",
            description="A description",
            maintainers=["foo", "bar"],
            repo_names=["foo/bar", "foo/baz"],
            privacy=TeamPrivacy.CLOSED,
            parent_team_id="123",
        )

        # the TeamPrivacy enum must be serialized to its string value
        self.client.post.assert_awaited_once_with(
            "/orgs/foo/teams",
            data={
                "name": "bar",
                "description": "A description",
                "maintainers": ["foo", "bar"],
                "repo_names": ["foo/bar", "foo/baz"],
                "privacy": "closed",
                "parent_team_id": "123",
            },
        )

        self.assertEqual(team.id, 1)

    async def test_create_failure(self):
        """HTTP errors while creating a team are propagated to the caller."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.post.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.create(
                "foo",
                "bar",
                description="A description",
                maintainers=["foo", "bar"],
                repo_names=["foo/bar", "foo/baz"],
                privacy=TeamPrivacy.CLOSED,
                parent_team_id="123",
            )

        # the request must have been issued even though it failed
        expected_data = {
            "name": "bar",
            "description": "A description",
            "maintainers": ["foo", "bar"],
            "repo_names": ["foo/bar", "foo/baz"],
            "privacy": "closed",
            "parent_team_id": "123",
        }
        self.client.post.assert_awaited_once_with(
            "/orgs/foo/teams", data=expected_data
        )

    async def test_get(self):
        """Fetching a team GETs its org/slug URL and parses the response."""
        response = create_response()
        response.json.return_value = {
            "id": 1,
            "node_id": "MDQ6VGVhbTE=",
            "url": "https://api.github.com/teams/1",
            "html_url": "https://github.com/orgs/github/teams/justice-league",
            "name": "Justice League",
            "slug": "justice-league",
            "description": "A great team.",
            "privacy": "closed",
            "permission": "admin",
            "members_url": "https://api.github.com/teams/1/members{/member}",
            "repositories_url": "https://api.github.com/teams/1/repos",
            "parent": None,
        }
        self.client.get.return_value = response

        team = await self.api.get("foo", "bar")

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/teams/bar",
        )

        self.assertEqual(team.id, 1)

    async def test_get_failure(self):
        """HTTP errors while fetching a team are propagated to the caller."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.get.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.get("foo", "bar")

        self.client.get.assert_awaited_once_with(
            "/orgs/foo/teams/bar",
        )

    async def test_update(self):
        """Updating a team sends the new settings and parses the response."""
        response = create_response()
        response.json.return_value = {
            "id": 1,
            "node_id": "MDQ6VGVhbTE=",
            "url": "https://api.github.com/teams/1",
            "html_url": "https://github.com/orgs/github/teams/justice-league",
            "name": "baz",
            "slug": "justice-league",
            "description": "A description",
            "privacy": "closed",
            "permission": "admin",
            "members_url": "https://api.github.com/teams/1/members{/member}",
            "repositories_url": "https://api.github.com/teams/1/repos",
            "parent": None,
        }
        self.client.post.return_value = response

        team = await self.api.update(
            "foo",
            "bar",
            name="baz",
            description="A description",
            privacy=TeamPrivacy.CLOSED,
            parent_team_id="123",
        )

        # NOTE(review): the client issues a POST here although GitHub's REST
        # API documents PATCH for team updates — presumably the pontos client
        # maps this internally; confirm against GitHubAsyncRESTTeams.update.
        self.client.post.assert_awaited_once_with(
            "/orgs/foo/teams/bar",
            data={
                "name": "baz",
                "description": "A description",
                "privacy": "closed",
                "parent_team_id": "123",
            },
        )

        self.assertEqual(team.id, 1)

    async def test_update_failure(self):
        """HTTP errors while updating a team are propagated to the caller."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.post.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.update(
                "foo",
                "bar",
                name="baz",
                description="A description",
                privacy=TeamPrivacy.CLOSED,
                parent_team_id="123",
            )

        # the request must have been issued even though it failed
        expected_data = {
            "name": "baz",
            "description": "A description",
            "privacy": "closed",
            "parent_team_id": "123",
        }
        self.client.post.assert_awaited_once_with(
            "/orgs/foo/teams/bar", data=expected_data
        )

    async def test_delete(self):
        """Deleting a team issues a DELETE request for the team's URL."""
        self.client.delete.return_value = create_response()

        await self.api.delete("foo", "bar")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/teams/bar",
        )

    async def test_delete_failure(self):
        """HTTP errors while deleting a team are propagated to the caller."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.delete.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.delete("foo", "bar")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/teams/bar",
        )

    async def test_members(self):
        """Iterating members yields all users across paginated responses."""
        # first page: one user
        response1 = create_response()
        response1.json.return_value = [
            {
                "id": 1,
                "login": "octocat",
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            }
        ]
        # second page: two more users (ids/logins differ)
        response2 = create_response()
        response2.json.return_value = [
            {
                "id": 2,
                "login": "octocat2",
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            {
                "id": 3,
                "login": "octocat3",
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
        ]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.members("foo", "bar"))
        member = await anext(async_it)
        self.assertEqual(member.id, 1)
        member = await anext(async_it)
        self.assertEqual(member.id, 2)
        member = await anext(async_it)
        self.assertEqual(member.id, 3)

        # three members total across both pages -> iterator must be exhausted
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/teams/bar/members",
            params={"per_page": "100"},
        )

    async def test_update_member(self):
        """Updating a team membership PUTs the requested role."""
        self.client.put.return_value = create_response()

        await self.api.update_member(
            "foo", "bar", "baz", role=TeamRole.MAINTAINER
        )

        # the TeamRole enum must be serialized to its string value
        self.client.put.assert_awaited_once_with(
            "/orgs/foo/teams/bar/memberships/baz", data={"role": "maintainer"}
        )

    async def test_add_member(self):
        """Adding a member issues the same PUT request as an update."""
        self.client.put.return_value = create_response()

        await self.api.add_member("foo", "bar", "baz", role=TeamRole.MAINTAINER)

        self.client.put.assert_awaited_once_with(
            "/orgs/foo/teams/bar/memberships/baz", data={"role": "maintainer"}
        )

    async def test_update_member_failure(self):
        """Membership update errors propagate; the default role is member."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.put.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.update_member("foo", "bar", "baz")

        self.client.put.assert_awaited_once_with(
            "/orgs/foo/teams/bar/memberships/baz", data={"role": "member"}
        )

    async def test_remove_member(self):
        """Removing a member issues a DELETE for the membership URL."""
        self.client.delete.return_value = create_response()

        await self.api.remove_member("foo", "bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/teams/bar/memberships/baz"
        )

    async def test_remove_member_failure(self):
        """HTTP errors while removing a member are propagated to the caller."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.delete.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.remove_member("foo", "bar", "baz")

        self.client.delete.assert_awaited_once_with(
            "/orgs/foo/teams/bar/memberships/baz"
        )

    async def test_repositories(self):
        response1 = create_response()
        response1.json.return_value = [
            {
                "id": 1,
                "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                "name": "Hello-World",
                "full_name": "octocat/Hello-World",
                "owner": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "private": False,
                "html_url": "https://github.com/octocat/Hello-World",
                "description": "This your first repo!",
                "fork": False,
                "url": "https://api.github.com/repos/octocat/Hello-World",
                "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                "git_url": "git:github.com/octocat/Hello-World.git",
                "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                "ssh_url": "git@github.com:octocat/Hello-World.git",
                "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                "clone_url": "https://github.com/octocat/Hello-World.git",
                "mirror_url": "git:git.example.com/octocat/Hello-World",
                "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
                "svn_url": "https://svn.github.com/octocat/Hello-World",
                "homepage": "https://github.com",
                "language": None,
                "forks_count": 9,
                "stargazers_count": 80,
                "watchers_count": 80,
                "size": 108,
                "default_branch": "master",
                "open_issues_count": 0,
                "is_template": False,
                "topics": ["octocat", "atom", "electron", "api"],
                "has_issues": True,
                "has_projects": True,
                "has_wiki": True,
                "has_pages": False,
                "has_downloads": True,
                "has_discussions": False,
                "archived": False,
                "disabled": False,
                "visibility": "public",
                "pushed_at": "2011-01-26T19:06:43Z",
                "created_at": "2011-01-26T19:01:12Z",
                "updated_at": "2011-01-26T19:14:43Z",
                "permissions": {"admin": False, "push": False, "pull": True},
            }
        ]
        response2 = create_response()
        response2.json.return_value = [
            {
                "id": 2,
                "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                "name": "Hello-World",
                "full_name": "octocat/Hello-World",
                "owner": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "private": False,
                "html_url": "https://github.com/octocat/Hello-World",
                "description": "This your first repo!",
                "fork": False,
                "url": "https://api.github.com/repos/octocat/Hello-World",
                "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                "git_url": "git:github.com/octocat/Hello-World.git",
                "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                "ssh_url": "git@github.com:octocat/Hello-World.git",
                "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                "clone_url": "https://github.com/octocat/Hello-World.git",
                "mirror_url": "git:git.example.com/octocat/Hello-World",
                "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
                "svn_url": "https://svn.github.com/octocat/Hello-World",
                "homepage": "https://github.com",
                "language": None,
                "forks_count": 9,
                "stargazers_count": 80,
                "watchers_count": 80,
                "size": 108,
                "default_branch": "master",
                "open_issues_count": 0,
                "is_template": False,
                "topics": ["octocat", "atom", "electron", "api"],
                "has_issues": True,
                "has_projects": True,
                "has_wiki": True,
                "has_pages": False,
                "has_downloads": True,
                "has_discussions": False,
                "archived": False,
                "disabled": False,
                "visibility": "public",
                "pushed_at": "2011-01-26T19:06:43Z",
                "created_at": "2011-01-26T19:01:12Z",
                "updated_at": "2011-01-26T19:14:43Z",
                "permissions": {"admin": False, "push": False, "pull": True},
            },
            {
                "id": 3,
                "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                "name": "Hello-World",
                "full_name": "octocat/Hello-World",
                "owner": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "private": False,
                "html_url": "https://github.com/octocat/Hello-World",
                "description": "This your first repo!",
                "fork": False,
                "url": "https://api.github.com/repos/octocat/Hello-World",
                "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                "git_url": "git:github.com/octocat/Hello-World.git",
                "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                "ssh_url": "git@github.com:octocat/Hello-World.git",
                "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                "clone_url": "https://github.com/octocat/Hello-World.git",
                "mirror_url": "git:git.example.com/octocat/Hello-World",
                "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
                "svn_url": "https://svn.github.com/octocat/Hello-World",
                "homepage": "https://github.com",
                "language": None,
                "forks_count": 9,
                "stargazers_count": 80,
                "watchers_count": 80,
                "size": 108,
                "default_branch": "master",
                "open_issues_count": 0,
                "is_template": False,
                "topics": ["octocat", "atom", "electron", "api"],
                "has_issues": True,
                "has_projects": True,
                "has_wiki": True,
                "has_pages": False,
                "has_downloads": True,
                "has_discussions": False,
                "archived": False,
                "disabled": False,
                "visibility": "public",
                "pushed_at": "2011-01-26T19:06:43Z",
                "created_at": "2011-01-26T19:01:12Z",
                "updated_at": "2011-01-26T19:14:43Z",
                "permissions": {"admin": False, "push": False, "pull": True},
            },
        ]

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(self.api.repositories("foo", "bar"))
        repo = await anext(async_it)
        self.assertEqual(repo.id, 1)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 2)
        repo = await anext(async_it)
        self.assertEqual(repo.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/orgs/foo/teams/bar/repos",
            params={"per_page": "100"},
        )

    async def test_update_permissions(self):
        """update_permission should PUT the new permission for the team repo."""
        self.client.put.return_value = create_response()

        await self.api.update_permission("foo", "bar", "baz", Permission.TRIAGE)

        # repo path is /orgs/{org}/teams/{team}/repos/{org}/{repo}
        self.client.put.assert_awaited_once_with(
            "/orgs/foo/teams/bar/repos/foo/baz",
            data={"permission": "triage"},
        )

    async def test_add_permissions(self):
        """add_permission should PUT the requested permission for the team repo."""
        self.client.put.return_value = create_response()

        await self.api.add_permission("foo", "bar", "baz", Permission.ADMIN)

        # repo path is /orgs/{org}/teams/{team}/repos/{org}/{repo}
        self.client.put.assert_awaited_once_with(
            "/orgs/foo/teams/bar/repos/foo/baz",
            data={"permission": "admin"},
        )

    async def test_update_permissions_failure(self):
        """update_permission should propagate HTTP errors raised by the client."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.put.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.update_permission(
                "foo", "bar", "baz", Permission.TRIAGE
            )

        # the request must still have been attempted exactly once
        self.client.put.assert_awaited_once_with(
            "/orgs/foo/teams/bar/repos/foo/baz", data={"permission": "triage"}
        )
pontos-25.3.2/tests/github/api/test_users.py000066400000000000000000000231461476255566300211250ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

from pontos.github.api.users import GitHubAsyncRESTUsers
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response


class GitHubAsyncRESTUsersTestCase(GitHubAsyncRESTTestCase):
    """Tests for the async GitHub REST users API (``GitHubAsyncRESTUsers``).

    Each test mocks the HTTP client, calls the API wrapper and verifies
    both the deserialized model and the exact request that was made.
    """

    api_cls = GitHubAsyncRESTUsers

    async def test_users(self):
        """users() should page through ``/users`` and yield user models."""
        response = create_response()
        response.json.return_value = [
            {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            }
        ]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.users())
        user = await anext(async_it)
        self.assertEqual(user.id, 1)

        # iterator must be exhausted after the single mocked page
        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/users",
            params={"per_page": "100"},
        )

    async def test_user(self):
        """user() should GET ``/users/{username}`` and return the model."""
        response = create_response()
        response.json.return_value = {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
            "name": "monalisa octocat",
            "company": "GitHub",
            "blog": "https://github.com/blog",
            "location": "San Francisco",
            "email": "octocat@github.com",
            "hireable": False,
            "bio": "There once was...",
            "twitter_username": "monatheoctocat",
            "public_repos": 2,
            "public_gists": 1,
            "followers": 20,
            "following": 0,
            "created_at": "2008-01-14T04:33:35Z",
            "updated_at": "2008-01-14T04:33:35Z",
        }
        self.client.get.return_value = response

        user = await self.api.user(
            "octocat",
        )

        self.client.get.assert_awaited_once_with(
            "/users/octocat",
        )

        self.assertEqual(user.id, 1)

    async def test_current_user(self):
        """current_user() should GET ``/user`` (the authenticated user)."""
        response = create_response()
        response.json.return_value = {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
            "name": "monalisa octocat",
            "company": "GitHub",
            "blog": "https://github.com/blog",
            "location": "San Francisco",
            "email": "octocat@github.com",
            "hireable": False,
            "bio": "There once was...",
            "twitter_username": "monatheoctocat",
            "public_repos": 2,
            "public_gists": 1,
            "followers": 20,
            "following": 0,
            "created_at": "2008-01-14T04:33:35Z",
            "updated_at": "2008-01-14T04:33:35Z",
        }
        self.client.get.return_value = response

        user = await self.api.current_user()

        self.client.get.assert_awaited_once_with("/user")

        self.assertEqual(user.id, 1)

    async def test_user_keys(self):
        """user_keys() should page through ``/users/{username}/keys``."""
        response = create_response()
        response.json.return_value = [{"id": 1, "key": "ssh-rsa AAA..."}]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.user_keys("foo"))
        key = await anext(async_it)
        self.assertEqual(key.id, 1)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/users/foo/keys",
            params={"per_page": "100"},
        )

    async def test_keys(self):
        """keys() should page through the authenticated user's ``/user/keys``."""
        response = create_response()
        response.json.return_value = [{"id": 1, "key": "ssh-rsa AAA..."}]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.keys())
        key = await anext(async_it)
        self.assertEqual(key.id, 1)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/user/keys",
            params={"per_page": "100"},
        )

    async def test_emails(self):
        """emails() should page through ``/user/emails``."""
        response = create_response()
        response.json.return_value = [
            {
                "email": "octocat@github.com",
                "verified": True,
                "primary": True,
                "visibility": "public",
            }
        ]

        self.client.get_all.return_value = AsyncIteratorMock([response])

        async_it = aiter(self.api.emails())
        email = await anext(async_it)
        self.assertEqual(email.email, "octocat@github.com")

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/user/emails",
            params={"per_page": "100"},
        )

    async def test_key(self):
        """key() should GET ``/user/keys/{id}`` and return the key model."""
        response = create_response()
        response.json.return_value = {
            "key": "2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvv1234",
            "id": 2,
            "url": "https://api.github.com/user/keys/2",
            "title": "ssh-rsa AAAAB3NzaC1yc2EAAA",
            "created_at": "2020-06-11T21:31:57Z",
            "verified": False,
            "read_only": False,
        }
        self.client.get.return_value = response

        key = await self.api.key(2)

        self.client.get.assert_awaited_once_with(
            "/user/keys/2",
        )

        self.assertEqual(key.id, 2)

    async def test_delete_key(self):
        """delete_key() should DELETE ``/user/keys/{id}``."""
        response = create_response()
        # fix: the DELETE call is under test, so the mocked response belongs
        # on client.delete (it was previously assigned to client.get)
        self.client.delete.return_value = response

        await self.api.delete_key(2)

        self.client.delete.assert_awaited_once_with(
            "/user/keys/2",
        )

    async def test_create_key(self):
        """create_key() should POST title and key to ``/user/keys``."""
        response = create_response()
        response.json.return_value = {
            "key": "2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvv1234",
            "id": 2,
            "url": "https://api.github.com/user/keys/2",
            "title": "ssh-rsa AAAAB3NzaC1yc2EAAA",
            "created_at": "2020-06-11T21:31:57Z",
            "verified": False,
            "read_only": False,
        }
        self.client.post.return_value = response

        key = await self.api.create_key(
            "ssh-rsa AAAAB3NzaC1yc2EAAA",
            "2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvv1234",
        )

        self.client.post.assert_awaited_once_with(
            "/user/keys",
            data={
                "title": "ssh-rsa AAAAB3NzaC1yc2EAAA",
                "key": "2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvv1234",
            },
        )

        self.assertEqual(key.id, 2)
pontos-25.3.2/tests/github/api/test_workflows.py000066400000000000000000003733271476255566300220320ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=too-many-lines, redefined-builtin, line-too-long

from pathlib import Path
from unittest.mock import MagicMock

import httpx

from pontos.github.api.workflows import GitHubAsyncRESTWorkflows
from tests import AsyncIteratorMock, aiter, anext
from tests.github.api import GitHubAsyncRESTTestCase, create_response

here = Path(__file__).parent


class GitHubAsyncRESTWorkflowsTestCase(GitHubAsyncRESTTestCase):
    api_cls = GitHubAsyncRESTWorkflows

    async def test_get(self):
        """get() should fetch a single workflow and deserialize it."""
        mock_response = create_response()
        mock_response.json.return_value = {
            "id": 1,
            "node_id": "MDg6V29ya2Zsb3cxNjEzMzU=",
            "name": "CI",
            "path": ".github/workflows/blank.yaml",
            "state": "active",
            "created_at": "2020-01-08T23:48:37.000-08:00",
            "updated_at": "2020-01-08T23:50:21.000-08:00",
            "url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/161335",
            "html_url": "https://github.com/octo-org/octo-repo/blob/master/.github/workflows/161335",
            "badge_url": "https://github.com/octo-org/octo-repo/workflows/CI/badge.svg",
        }
        self.client.get.return_value = mock_response

        workflow = await self.api.get("foo/bar", "ci.yml")

        # a single GET against the workflow resource is expected
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/actions/workflows/ci.yml"
        )
        self.assertEqual(workflow.id, 1)

    async def test_get_failure(self):
        """get() should propagate HTTP errors raised by the client."""
        self.client.get.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.get("foo/bar", "ci.yml")

        # the request must still have been attempted exactly once
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/actions/workflows/ci.yml"
        )

    async def test_get_all(self):
        """get_all() should yield workflows from every paginated response."""
        first_page = create_response()
        first_page.json.return_value = {
            "workflows": [
                {
                    "id": 1,
                    "node_id": "MDg6V29ya2Zsb3cxNjEzMzU=",
                    "name": "CI",
                    "path": ".github/workflows/blank.yaml",
                    "state": "active",
                    "created_at": "2020-01-08T23:48:37.000-08:00",
                    "updated_at": "2020-01-08T23:50:21.000-08:00",
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/161335",
                    "html_url": "https://github.com/octo-org/octo-repo/blob/master/.github/workflows/161335",
                    "badge_url": "https://github.com/octo-org/octo-repo/workflows/CI/badge.svg",
                }
            ]
        }
        second_page = create_response()
        second_page.json.return_value = {
            "workflows": [
                {
                    "id": 2,
                    "node_id": "MDg6V29ya2Zsb3cxNjEzMzU=",
                    "name": "CI",
                    "path": ".github/workflows/blank.yaml",
                    "state": "active",
                    "created_at": "2020-01-08T23:48:37.000-08:00",
                    "updated_at": "2020-01-08T23:50:21.000-08:00",
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/161335",
                    "html_url": "https://github.com/octo-org/octo-repo/blob/master/.github/workflows/161335",
                    "badge_url": "https://github.com/octo-org/octo-repo/workflows/CI/badge.svg",
                },
                {
                    "id": 3,
                    "node_id": "MDg6V29ya2Zsb3cxNjEzMzU=",
                    "name": "CI",
                    "path": ".github/workflows/blank.yaml",
                    "state": "active",
                    "created_at": "2020-01-08T23:48:37.000-08:00",
                    "updated_at": "2020-01-08T23:50:21.000-08:00",
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/161335",
                    "html_url": "https://github.com/octo-org/octo-repo/blob/master/.github/workflows/161335",
                    "badge_url": "https://github.com/octo-org/octo-repo/workflows/CI/badge.svg",
                },
            ]
        }

        self.client.get_all.return_value = AsyncIteratorMock(
            [first_page, second_page]
        )

        iterator = aiter(self.api.get_all("foo/bar"))
        # workflows from both pages are flattened into one stream, in order
        for expected_id in (1, 2, 3):
            workflow = await anext(iterator)
            self.assertEqual(workflow.id, expected_id)

        # the iterator must be exhausted after the mocked pages
        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/actions/workflows",
            params={"per_page": "100"},
        )

    async def test_get_workflow_runs(self):
        response1 = create_response()
        response1.json.return_value = {
            "workflow_runs": [
                {
                    "id": 1,
                    "name": "Build",
                    "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
                    "check_suite_id": 42,
                    "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
                    "head_branch": "master",
                    "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
                    "run_number": 562,
                    "event": "push",
                    "status": "queued",
                    "conclusion": None,
                    "workflow_id": 159038,
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
                    "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
                    "pull_requests": [],
                    "created_at": "2020-01-22T19:33:08Z",
                    "updated_at": "2020-01-22T19:33:08Z",
                    "actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "run_attempt": 1,
                    "run_started_at": "2020-01-22T19:33:08Z",
                    "triggering_actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
                    "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
                    "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
                    "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
                    "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
                    "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
                    "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
                    "head_commit": {
                        "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                        "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                        "message": "Create linter.yaml",
                        "timestamp": "2020-01-22T19:33:05Z",
                        "author": {
                            "name": "Octo Cat",
                            "email": "octocat@github.com",
                        },
                        "committer": {
                            "name": "GitHub",
                            "email": "noreply@github.com",
                        },
                    },
                    "repository": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
                    },
                    "head_repository": {
                        "id": 217723378,
                        "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                        "name": "octo-repo",
                        "full_name": "octo-org/octo-repo",
                        "private": True,
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "html_url": "https://github.com/octo-org/octo-repo",
                        "description": None,
                        "fork": False,
                        "url": "https://api.github.com/repos/octo-org/octo-repo",
                        "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                        "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                        "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                        "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                        "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                        "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                        "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                        "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                        "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                        "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                        "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                        "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                        "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                        "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                        "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                        "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                        "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                        "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                        "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                        "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                        "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                        "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                        "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                        "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                        "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                        "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                        "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                        "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                        "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                        "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                        "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                        "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                        "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
                    },
                }
            ]
        }
        response2 = create_response()
        response2.json.return_value = {
            "workflow_runs": [
                {
                    "id": 2,
                    "name": "Build",
                    "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
                    "check_suite_id": 42,
                    "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
                    "head_branch": "master",
                    "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
                    "run_number": 562,
                    "event": "push",
                    "status": "queued",
                    "conclusion": None,
                    "workflow_id": 159038,
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
                    "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
                    "pull_requests": [],
                    "created_at": "2020-01-22T19:33:08Z",
                    "updated_at": "2020-01-22T19:33:08Z",
                    "actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "run_attempt": 1,
                    "run_started_at": "2020-01-22T19:33:08Z",
                    "triggering_actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
                    "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
                    "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
                    "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
                    "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
                    "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
                    "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
                    "head_commit": {
                        "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                        "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                        "message": "Create linter.yaml",
                        "timestamp": "2020-01-22T19:33:05Z",
                        "author": {
                            "name": "Octo Cat",
                            "email": "octocat@github.com",
                        },
                        "committer": {
                            "name": "GitHub",
                            "email": "noreply@github.com",
                        },
                    },
                    "repository": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
                    },
                    "head_repository": {
                        "id": 217723378,
                        "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                        "name": "octo-repo",
                        "full_name": "octo-org/octo-repo",
                        "private": True,
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "html_url": "https://github.com/octo-org/octo-repo",
                        "description": None,
                        "fork": False,
                        "url": "https://api.github.com/repos/octo-org/octo-repo",
                        "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                        "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                        "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                        "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                        "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                        "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                        "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                        "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                        "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                        "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                        "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                        "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                        "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                        "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                        "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                        "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                        "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                        "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                        "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                        "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                        "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                        "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                        "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                        "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                        "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                        "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                        "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                        "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                        "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                        "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                        "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                        "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                        "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
                    },
                },
                {
                    "id": 3,
                    "name": "Build",
                    "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
                    "check_suite_id": 42,
                    "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
                    "head_branch": "master",
                    "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
                    "run_number": 562,
                    "event": "push",
                    "status": "queued",
                    "conclusion": None,
                    "workflow_id": 159038,
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
                    "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
                    "pull_requests": [],
                    "created_at": "2020-01-22T19:33:08Z",
                    "updated_at": "2020-01-22T19:33:08Z",
                    "actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "run_attempt": 1,
                    "run_started_at": "2020-01-22T19:33:08Z",
                    "triggering_actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
                    "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
                    "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
                    "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
                    "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
                    "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
                    "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
                    "head_commit": {
                        "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                        "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                        "message": "Create linter.yaml",
                        "timestamp": "2020-01-22T19:33:05Z",
                        "author": {
                            "name": "Octo Cat",
                            "email": "octocat@github.com",
                        },
                        "committer": {
                            "name": "GitHub",
                            "email": "noreply@github.com",
                        },
                    },
                    "repository": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
                    },
                    "head_repository": {
                        "id": 217723378,
                        "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                        "name": "octo-repo",
                        "full_name": "octo-org/octo-repo",
                        "private": True,
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "html_url": "https://github.com/octo-org/octo-repo",
                        "description": None,
                        "fork": False,
                        "url": "https://api.github.com/repos/octo-org/octo-repo",
                        "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                        "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                        "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                        "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                        "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                        "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                        "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                        "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                        "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                        "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                        "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                        "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                        "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                        "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                        "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                        "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                        "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                        "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                        "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                        "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                        "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                        "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                        "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                        "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                        "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                        "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                        "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                        "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                        "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                        "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                        "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                        "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                        "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
                    },
                },
            ]
        }

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(
            self.api.get_workflow_runs(
                "foo/bar",
                actor="foo",
                branch="stable",
                exclude_pull_requests=True,
            )
        )
        run = await anext(async_it)
        self.assertEqual(run.id, 1)
        run = await anext(async_it)
        self.assertEqual(run.id, 2)
        run = await anext(async_it)
        self.assertEqual(run.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/actions/runs",
            params={
                "actor": "foo",
                "branch": "stable",
                "exclude_pull_requests": True,
                "per_page": "100",
            },
        )

    async def test_get_workflow_runs_for_workflow(self):
        response1 = create_response()
        response1.json.return_value = {
            "workflow_runs": [
                {
                    "id": 1,
                    "name": "Build",
                    "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
                    "check_suite_id": 42,
                    "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
                    "head_branch": "master",
                    "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
                    "run_number": 562,
                    "event": "push",
                    "status": "queued",
                    "conclusion": None,
                    "workflow_id": 159038,
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
                    "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
                    "pull_requests": [],
                    "created_at": "2020-01-22T19:33:08Z",
                    "updated_at": "2020-01-22T19:33:08Z",
                    "actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "run_attempt": 1,
                    "run_started_at": "2020-01-22T19:33:08Z",
                    "triggering_actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
                    "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
                    "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
                    "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
                    "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
                    "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
                    "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
                    "head_commit": {
                        "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                        "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                        "message": "Create linter.yaml",
                        "timestamp": "2020-01-22T19:33:05Z",
                        "author": {
                            "name": "Octo Cat",
                            "email": "octocat@github.com",
                        },
                        "committer": {
                            "name": "GitHub",
                            "email": "noreply@github.com",
                        },
                    },
                    "repository": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
                    },
                    "head_repository": {
                        "id": 217723378,
                        "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                        "name": "octo-repo",
                        "full_name": "octo-org/octo-repo",
                        "private": True,
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "html_url": "https://github.com/octo-org/octo-repo",
                        "description": None,
                        "fork": False,
                        "url": "https://api.github.com/repos/octo-org/octo-repo",
                        "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                        "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                        "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                        "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                        "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                        "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                        "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                        "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                        "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                        "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                        "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                        "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                        "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                        "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                        "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                        "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                        "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                        "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                        "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                        "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                        "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                        "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                        "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                        "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                        "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                        "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                        "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                        "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                        "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                        "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                        "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                        "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                        "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
                    },
                }
            ]
        }
        # Second mocked HTTP response: its JSON payload (set below) lists two
        # queued workflow runs (ids 2 and 3) — presumably the next page of a
        # paginated workflow-runs listing; confirm against the surrounding test.
        response2 = create_response()
        response2.json.return_value = {
            "workflow_runs": [
                {
                    "id": 2,
                    "name": "Build",
                    "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
                    "check_suite_id": 42,
                    "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
                    "head_branch": "master",
                    "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
                    "run_number": 562,
                    "event": "push",
                    "status": "queued",
                    "conclusion": None,
                    "workflow_id": 159038,
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
                    "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
                    "pull_requests": [],
                    "created_at": "2020-01-22T19:33:08Z",
                    "updated_at": "2020-01-22T19:33:08Z",
                    "actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "run_attempt": 1,
                    "run_started_at": "2020-01-22T19:33:08Z",
                    "triggering_actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
                    "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
                    "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
                    "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
                    "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
                    "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
                    "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
                    "head_commit": {
                        "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                        "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                        "message": "Create linter.yaml",
                        "timestamp": "2020-01-22T19:33:05Z",
                        "author": {
                            "name": "Octo Cat",
                            "email": "octocat@github.com",
                        },
                        "committer": {
                            "name": "GitHub",
                            "email": "noreply@github.com",
                        },
                    },
                    "repository": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
                    },
                    "head_repository": {
                        "id": 217723378,
                        "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                        "name": "octo-repo",
                        "full_name": "octo-org/octo-repo",
                        "private": True,
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "html_url": "https://github.com/octo-org/octo-repo",
                        "description": None,
                        "fork": False,
                        "url": "https://api.github.com/repos/octo-org/octo-repo",
                        "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                        "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                        "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                        "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                        "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                        "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                        "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                        "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                        "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                        "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                        "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                        "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                        "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                        "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                        "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                        "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                        "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                        "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                        "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                        "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                        "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                        "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                        "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                        "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                        "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                        "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                        "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                        "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                        "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                        "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                        "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                        "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                        "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
                    },
                },
                {
                    "id": 3,
                    "name": "Build",
                    "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
                    "check_suite_id": 42,
                    "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
                    "head_branch": "master",
                    "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
                    "run_number": 562,
                    "event": "push",
                    "status": "queued",
                    "conclusion": None,
                    "workflow_id": 159038,
                    "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
                    "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
                    "pull_requests": [],
                    "created_at": "2020-01-22T19:33:08Z",
                    "updated_at": "2020-01-22T19:33:08Z",
                    "actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "run_attempt": 1,
                    "run_started_at": "2020-01-22T19:33:08Z",
                    "triggering_actor": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
                    "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
                    "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
                    "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
                    "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
                    "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
                    "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
                    "head_commit": {
                        "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                        "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                        "message": "Create linter.yaml",
                        "timestamp": "2020-01-22T19:33:05Z",
                        "author": {
                            "name": "Octo Cat",
                            "email": "octocat@github.com",
                        },
                        "committer": {
                            "name": "GitHub",
                            "email": "noreply@github.com",
                        },
                    },
                    "repository": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
                    },
                    "head_repository": {
                        "id": 217723378,
                        "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                        "name": "octo-repo",
                        "full_name": "octo-org/octo-repo",
                        "private": True,
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "html_url": "https://github.com/octo-org/octo-repo",
                        "description": None,
                        "fork": False,
                        "url": "https://api.github.com/repos/octo-org/octo-repo",
                        "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                        "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                        "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                        "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                        "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                        "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                        "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                        "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                        "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                        "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                        "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                        "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                        "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                        "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                        "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                        "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                        "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                        "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                        "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                        "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                        "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                        "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                        "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                        "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                        "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                        "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                        "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                        "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                        "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                        "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                        "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                        "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                        "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
                    },
                },
            ]
        }

        self.client.get_all.return_value = AsyncIteratorMock(
            [response1, response2]
        )

        async_it = aiter(
            self.api.get_workflow_runs(
                "foo/bar",
                "ci.yml",
                actor="foo",
                branch="stable",
                exclude_pull_requests=True,
            )
        )
        run = await anext(async_it)
        self.assertEqual(run.id, 1)
        run = await anext(async_it)
        self.assertEqual(run.id, 2)
        run = await anext(async_it)
        self.assertEqual(run.id, 3)

        with self.assertRaises(StopAsyncIteration):
            await anext(async_it)

        self.client.get_all.assert_called_once_with(
            "/repos/foo/bar/actions/workflows/ci.yml/runs",
            params={
                "actor": "foo",
                "branch": "stable",
                "exclude_pull_requests": True,
                "per_page": "100",
            },
        )

    async def test_get_workflow_run(self):
        """Fetching a single workflow run must GET the run endpoint once and
        deserialize the JSON payload into a workflow run object."""
        # Canned payload for GET /repos/{owner}/{repo}/actions/runs/{run_id},
        # mirroring the example from the GitHub REST API documentation.
        response = create_response()
        response.json.return_value = {
            "id": 1,
            "name": "Build",
            "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
            "check_suite_id": 42,
            "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
            "head_branch": "master",
            "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
            "run_number": 562,
            "event": "push",
            "status": "queued",
            "conclusion": None,
            "workflow_id": 159038,
            "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
            "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
            "pull_requests": [],
            "created_at": "2020-01-22T19:33:08Z",
            "updated_at": "2020-01-22T19:33:08Z",
            "actor": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "run_attempt": 1,
            "run_started_at": "2020-01-22T19:33:08Z",
            "triggering_actor": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": False,
            },
            "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
            "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
            "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
            "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
            "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
            "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
            "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
            "head_commit": {
                "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                "message": "Create linter.yaml",
                "timestamp": "2020-01-22T19:33:05Z",
                "author": {
                    "name": "Octo Cat",
                    "email": "octocat@github.com",
                },
                "committer": {
                    "name": "GitHub",
                    "email": "noreply@github.com",
                },
            },
            "repository": {
                "id": 1296269,
                "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                "name": "Hello-World",
                "full_name": "octocat/Hello-World",
                "owner": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "private": False,
                "html_url": "https://github.com/octocat/Hello-World",
                "description": "This your first repo!",
                "fork": False,
                "url": "https://api.github.com/repos/octocat/Hello-World",
                "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                "git_url": "git:github.com/octocat/Hello-World.git",
                "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                "ssh_url": "git@github.com:octocat/Hello-World.git",
                "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
            },
            "head_repository": {
                "id": 217723378,
                "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                "name": "octo-repo",
                "full_name": "octo-org/octo-repo",
                "private": True,
                "owner": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "html_url": "https://github.com/octo-org/octo-repo",
                "description": None,
                "fork": False,
                "url": "https://api.github.com/repos/octo-org/octo-repo",
                "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
            },
        }
        self.client.get.return_value = response

        workflow = await self.api.get_workflow_run("foo/bar", "123")

        # Exactly one GET against the run endpoint, no query parameters.
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/actions/runs/123"
        )

        # Spot-check that the payload was turned into an object with the
        # expected id attribute.
        self.assertEqual(workflow.id, 1)

    async def test_get_workflow_run_failure(self):
        """An HTTP error raised by the client must propagate unchanged out
        of get_workflow_run."""
        error = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )
        self.client.get.side_effect = error

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.get_workflow_run("foo/bar", "123")

        # The failing request must still have targeted the run endpoint.
        self.client.get.assert_awaited_once_with(
            "/repos/foo/bar/actions/runs/123"
        )

    async def test_create_workflow_dispatch(self):
        """Triggering a workflow dispatch must POST the ref and the inputs
        to the dispatches endpoint."""
        self.client.post.return_value = create_response()

        workflow_inputs = {"foo": "bar"}

        await self.api.create_workflow_dispatch(
            "foo/bar", "ci.yml", ref="stable", inputs=workflow_inputs
        )

        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/actions/workflows/ci.yml/dispatches",
            data={"ref": "stable", "inputs": workflow_inputs},
        )

    async def test_create_workflow_dispatch_failure(self):
        """An HTTP error raised while dispatching a workflow must propagate
        unchanged to the caller."""
        self.client.post.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=create_response()
        )

        workflow_inputs = {"foo": "bar"}

        with self.assertRaises(httpx.HTTPStatusError):
            await self.api.create_workflow_dispatch(
                "foo/bar", "ci.yml", ref="stable", inputs=workflow_inputs
            )

        # The failing request must still carry ref and inputs in the body.
        self.client.post.assert_awaited_once_with(
            "/repos/foo/bar/actions/workflows/ci.yml/dispatches",
            data={"ref": "stable", "inputs": workflow_inputs},
        )
pontos-25.3.2/tests/github/models/000077500000000000000000000000001476255566300170575ustar00rootroot00000000000000pontos-25.3.2/tests/github/models/__init__.py000066400000000000000000000001411476255566300211640ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/github/models/test_artifact.py000066400000000000000000000050371476255566300222720ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long

import unittest
from datetime import datetime, timezone

from pontos.github.models.artifact import Artifact


class ArtifactTestCase(unittest.TestCase):
    """Tests for deserializing GitHub Actions artifact payloads."""

    def test_from_dict(self):
        """Artifact.from_dict must map every field of the API payload,
        parsing the timestamps into timezone-aware datetimes."""
        data = {
            "id": 1,
            "node_id": "MDg6QXJ0aWZhY3QxMQ==",
            "name": "Rails",
            "size_in_bytes": 556,
            "url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11",
            "archive_download_url": "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11/zip",
            "expired": False,
            "created_at": "2020-01-10T14:59:22Z",
            "expires_at": "2020-03-21T14:59:22Z",
            "updated_at": "2020-02-21T14:59:22Z",
            "workflow_run": {
                "id": 1,
                "repository_id": 2,
                "head_repository_id": 3,
                "head_branch": "main",
                "head_sha": "328faa0536e6fef19753d9d91dc96a9931694ce3",
            },
        }

        artifact = Artifact.from_dict(data)

        self.assertEqual(artifact.id, 1)
        self.assertEqual(artifact.node_id, "MDg6QXJ0aWZhY3QxMQ==")
        self.assertEqual(artifact.name, "Rails")
        self.assertEqual(artifact.size_in_bytes, 556)
        self.assertEqual(
            artifact.url,
            "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11",
        )
        self.assertEqual(
            artifact.archive_download_url,
            "https://api.github.com/repos/octo-org/octo-docs/actions/artifacts/11/zip",
        )
        self.assertFalse(artifact.expired)
        # ISO 8601 "Z" timestamps should come back as UTC datetimes.
        self.assertEqual(
            artifact.created_at,
            datetime(2020, 1, 10, 14, 59, 22, tzinfo=timezone.utc),
        )
        self.assertEqual(
            artifact.expires_at,
            datetime(2020, 3, 21, 14, 59, 22, tzinfo=timezone.utc),
        )
        self.assertEqual(
            artifact.updated_at,
            datetime(2020, 2, 21, 14, 59, 22, tzinfo=timezone.utc),
        )

        run = artifact.workflow_run
        self.assertEqual(run.id, 1)
        self.assertEqual(run.repository_id, 2)
        self.assertEqual(run.head_repository_id, 3)
        self.assertEqual(run.head_branch, "main")
        self.assertEqual(
            run.head_sha, "328faa0536e6fef19753d9d91dc96a9931694ce3"
        )
pontos-25.3.2/tests/github/models/test_base.py000066400000000000000000000217741476255566300214150ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long


import unittest

from pontos.github.api.teams import TeamPrivacy
from pontos.github.models.base import App, Permission, Team, User


class UserTestCase(unittest.TestCase):
    """Tests for deserializing GitHub user/organization payloads."""

    def test_from_dict(self):
        """User.from_dict must copy every field of the API payload onto the
        resulting model verbatim."""
        payload = {
            "login": "greenbone",
            "id": 31986857,
            "node_id": "MDEyOk9yZ2FuaXphdGlvbjMxOTg2ODU3",
            "avatar_url": "https://avatars.githubusercontent.com/u/31986857?v=4",
            "gravatar_id": "",
            "url": "https://api.github.com/users/greenbone",
            "html_url": "https://github.com/greenbone",
            "followers_url": "https://api.github.com/users/greenbone/followers",
            "following_url": "https://api.github.com/users/greenbone/following{/other_user}",
            "gists_url": "https://api.github.com/users/greenbone/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/greenbone/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/greenbone/subscriptions",
            "organizations_url": "https://api.github.com/users/greenbone/orgs",
            "repos_url": "https://api.github.com/users/greenbone/repos",
            "events_url": "https://api.github.com/users/greenbone/events{/privacy}",
            "received_events_url": "https://api.github.com/users/greenbone/received_events",
            "type": "Organization",
            "site_admin": False,
        }

        user = User.from_dict(payload)

        self.assertEqual(user.login, "greenbone")
        self.assertEqual(user.id, 31986857)
        self.assertEqual(user.node_id, "MDEyOk9yZ2FuaXphdGlvbjMxOTg2ODU3")
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/greenbone")
        self.assertEqual(user.html_url, "https://github.com/greenbone")
        self.assertEqual(
            user.followers_url,
            "https://api.github.com/users/greenbone/followers",
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/greenbone/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/greenbone/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/greenbone/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/greenbone/subscriptions",
        )
        self.assertEqual(
            user.organizations_url,
            "https://api.github.com/users/greenbone/orgs",
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/greenbone/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/greenbone/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/greenbone/received_events",
        )
        self.assertEqual(user.type, "Organization")
        self.assertFalse(user.site_admin)


class TeamTestCase(unittest.TestCase):
    """Tests for deserializing GitHub team payloads."""

    def test_from_dict(self):
        """Team.from_dict must map the payload fields, converting the
        privacy and permission strings into their enum members."""
        payload = {
            "name": "python-gvm-maintainers",
            "id": 3764115,
            "node_id": "MDQ6VGVhbTM3NjQxMTU=",
            "slug": "python-gvm-maintainers",
            "description": "Maintainers of python code at GVM",
            "privacy": "closed",
            "url": "https://api.github.com/organizations/31986857/team/3764115",
            "html_url": "https://github.com/orgs/greenbone/teams/python-gvm-maintainers",
            "members_url": "https://api.github.com/organizations/31986857/team/3764115/members{/member}",
            "repositories_url": "https://api.github.com/organizations/31986857/team/3764115/repos",
            "permission": "pull",
            "parent": None,
        }

        team = Team.from_dict(payload)

        self.assertEqual(team.name, "python-gvm-maintainers")
        self.assertEqual(team.id, 3764115)
        self.assertEqual(team.node_id, "MDQ6VGVhbTM3NjQxMTU=")
        self.assertEqual(team.slug, "python-gvm-maintainers")
        self.assertEqual(team.description, "Maintainers of python code at GVM")
        # Raw strings must be resolved to their enum members.
        self.assertEqual(team.privacy, TeamPrivacy.CLOSED)
        self.assertEqual(team.permission, Permission.PULL)
        self.assertEqual(
            team.url,
            "https://api.github.com/organizations/31986857/team/3764115",
        )
        self.assertEqual(
            team.html_url,
            "https://github.com/orgs/greenbone/teams/python-gvm-maintainers",
        )
        self.assertEqual(
            team.members_url,
            "https://api.github.com/organizations/31986857/team/3764115/members{/member}",
        )
        self.assertEqual(
            team.repositories_url,
            "https://api.github.com/organizations/31986857/team/3764115/repos",
        )
        self.assertIsNone(team.parent)


class AppTestCase(unittest.TestCase):
    """Tests for deserializing the App model from an API payload."""

    def test_from_dict(self):
        """App.from_dict must map the app fields and the nested owner.

        The ``owner`` sub-dictionary has to be turned into a full user
        model with all of its attributes populated.
        """
        payload = {
            "id": 1,
            "slug": "octoapp",
            "node_id": "MDExOkludGVncmF0aW9uMQ==",
            "owner": {
                "login": "github",
                "id": 1,
                "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/orgs/github",
                "html_url": "https://github.com/github",
                "followers_url": "https://api.github.com/users/github/followers",
                "following_url": "https://api.github.com/users/github/following{/other_user}",
                "gists_url": "https://api.github.com/users/github/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/github/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/github/subscriptions",
                "organizations_url": "https://api.github.com/users/github/orgs",
                "repos_url": "https://api.github.com/orgs/github/repos",
                "events_url": "https://api.github.com/orgs/github/events",
                "received_events_url": "https://api.github.com/users/github/received_events",
                "type": "Organization",
                "site_admin": False,
            },
            "name": "Octocat App",
            "description": "",
            "external_url": "https://example.com",
            "html_url": "https://github.com/apps/octoapp",
            "created_at": "2017-07-08T16:18:44-04:00",
            "updated_at": "2017-07-08T16:18:44-04:00",
            "events": ["push", "pull_request"],
        }

        app = App.from_dict(payload)

        expected_app_attributes = {
            "id": 1,
            "slug": "octoapp",
            "node_id": "MDExOkludGVncmF0aW9uMQ==",
            "name": "Octocat App",
            "description": "",
            "external_url": "https://example.com",
            "html_url": "https://github.com/apps/octoapp",
            "created_at": "2017-07-08T16:18:44-04:00",
            "updated_at": "2017-07-08T16:18:44-04:00",
            "events": ["push", "pull_request"],
        }
        for attribute, expected in expected_app_attributes.items():
            self.assertEqual(getattr(app, attribute), expected)

        # The nested owner dict becomes a complete user model.
        owner = app.owner
        expected_owner_attributes = {
            "login": "github",
            "id": 1,
            "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/orgs/github",
            "html_url": "https://github.com/github",
            "followers_url": "https://api.github.com/users/github/followers",
            "following_url": "https://api.github.com/users/github/following{/other_user}",
            "gists_url": "https://api.github.com/users/github/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/github/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/github/subscriptions",
            "organizations_url": "https://api.github.com/users/github/orgs",
            "repos_url": "https://api.github.com/orgs/github/repos",
            "events_url": "https://api.github.com/orgs/github/events",
            "type": "Organization",
        }
        for attribute, expected in expected_owner_attributes.items():
            self.assertEqual(getattr(owner, attribute), expected)
        self.assertFalse(owner.site_admin)
pontos-25.3.2/tests/github/models/test_billing.py000066400000000000000000000042321476255566300221110ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

import unittest

from pontos.github.models.billing import (
    ActionsBilling,
    PackagesBilling,
    StorageBilling,
)


class ActionsBillingTestCase(unittest.TestCase):
    """Tests for deserializing GitHub Actions billing information."""

    def test_from_dict(self):
        """ActionsBilling.from_dict maps totals and the per-OS breakdown."""
        billing = ActionsBilling.from_dict(
            {
                "total_minutes_used": 305,
                "total_paid_minutes_used": 0,
                "included_minutes": 3000,
                "minutes_used_breakdown": {
                    "UBUNTU": 205,
                    "MACOS": 10,
                    "WINDOWS": 90,
                },
            }
        )

        self.assertEqual(billing.total_minutes_used, 305)
        self.assertEqual(billing.total_paid_minutes_used, 0)
        self.assertEqual(billing.included_minutes, 3000)

        breakdown = billing.minutes_used_breakdown
        self.assertEqual(breakdown.UBUNTU, 205)
        self.assertEqual(breakdown.MACOS, 10)
        self.assertEqual(breakdown.WINDOWS, 90)
        # "total" is not part of the payload and therefore must stay unset.
        self.assertIsNone(breakdown.total)


class PackagesBillingTestCase(unittest.TestCase):
    """Tests for deserializing GitHub Packages billing information."""

    def test_from_dict(self):
        """PackagesBilling.from_dict copies every bandwidth counter.

        The model attribute names equal the payload keys, so the payload
        doubles as the table of expected values.
        """
        payload = {
            "total_gigabytes_bandwidth_used": 50,
            "total_paid_gigabytes_bandwidth_used": 40,
            "included_gigabytes_bandwidth": 10,
        }

        billing = PackagesBilling.from_dict(payload)

        for attribute, expected in payload.items():
            self.assertEqual(getattr(billing, attribute), expected)


class StorageBillingTestCase(unittest.TestCase):
    """Tests for deserializing shared storage billing information."""

    def test_from_dict(self):
        """StorageBilling.from_dict copies every storage counter.

        The model attribute names equal the payload keys, so the payload
        doubles as the table of expected values.
        """
        payload = {
            "days_left_in_billing_cycle": 20,
            "estimated_paid_storage_for_month": 15,
            "estimated_storage_for_month": 40,
        }

        billing = StorageBilling.from_dict(payload)

        for attribute, expected in payload.items():
            self.assertEqual(getattr(billing, attribute), expected)
pontos-25.3.2/tests/github/models/test_branch.py000066400000000000000000000750261476255566300217370ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long

import unittest

from pontos.github.api.teams import TeamPrivacy
from pontos.github.models.base import Permission
from pontos.github.models.branch import (
    BranchProtection,
    RequiredPullRequestReviews,
    RequiredStatusChecks,
    Restrictions,
)


class RestrictionsTestCase(unittest.TestCase):
    """Tests for the branch protection Restrictions model."""

    def test_from_dict(self):
        """Restrictions.from_dict maps the URLs and the nested user list."""
        data = {
            "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions",
            "users_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/users",
            "teams_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/teams",
            "apps_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/apps",
            "users": [
                {
                    "login": "greenbonebot",
                    "id": 123,
                    "node_id": "MDQ6VXNlcjg1MjU0NjY2",
                    "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/greenbonebot",
                    "html_url": "https://github.com/greenbonebot",
                    "followers_url": "https://api.github.com/users/greenbonebot/followers",
                    "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
                    "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
                    "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
                    "repos_url": "https://api.github.com/users/greenbonebot/repos",
                    "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
                    "type": "User",
                    "site_admin": False,
                }
            ],
            "teams": [],
            "apps": [],
        }

        restrictions = Restrictions.from_dict(data)

        # The four restriction endpoint URLs are copied verbatim.
        expected_urls = {
            "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions",
            "users_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/users",
            "teams_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/teams",
            "apps_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/apps",
        }
        for attribute, expected in expected_urls.items():
            self.assertEqual(getattr(restrictions, attribute), expected)

        # Exactly one allowed user with all attributes populated.
        self.assertEqual(len(restrictions.users), 1)
        user = restrictions.users[0]
        expected_user_attributes = {
            "login": "greenbonebot",
            "node_id": "MDQ6VXNlcjg1MjU0NjY2",
            "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
            "gravatar_id": "",
            "url": "https://api.github.com/users/greenbonebot",
            "html_url": "https://github.com/greenbonebot",
            "followers_url": "https://api.github.com/users/greenbonebot/followers",
            "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
            "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
            "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
            "repos_url": "https://api.github.com/users/greenbonebot/repos",
            "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
            "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
            "type": "User",
            "site_admin": False,
        }
        for attribute, expected in expected_user_attributes.items():
            self.assertEqual(getattr(user, attribute), expected)

        # Empty team and app lists stay empty lists, not None.
        self.assertEqual(restrictions.teams, [])
        self.assertEqual(restrictions.apps, [])


class RequiredStatusChecksTestCase(unittest.TestCase):
    """Tests for the RequiredStatusChecks model."""

    def test_from_dict(self):
        """Checks are parsed in order; a missing app_id becomes None."""
        checks = RequiredStatusChecks.from_dict(
            {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_status_checks",
                "strict": True,
                "checks": [
                    {"context": "unittests", "app_id": 123},
                    {"context": "linting"},
                ],
            }
        )

        self.assertEqual(
            checks.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_status_checks",
        )
        self.assertTrue(checks.strict)

        self.assertEqual(len(checks.checks), 2)
        first, second = checks.checks
        self.assertEqual(first.context, "unittests")
        self.assertEqual(first.app_id, 123)
        self.assertEqual(second.context, "linting")
        # No "app_id" key in the payload maps to None on the model.
        self.assertIsNone(second.app_id)


class RequiredPullRequestReviewsTestCase(unittest.TestCase):
    """Tests for the RequiredPullRequestReviews model."""

    def test_from_dict(self):
        """All review flags, counts and dismissal restrictions are parsed.

        The nested ``dismissal_restrictions`` dict has to become a full
        restrictions model including its user and team lists.
        """
        data = {
            "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_pull_request_reviews",
            "dismiss_stale_reviews": True,
            "require_code_owner_reviews": True,
            "require_last_push_approval": True,
            "required_approving_review_count": 1,
            "dismissal_restrictions": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions",
                "users_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/users",
                "teams_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/teams",
                "users": [
                    {
                        "login": "greenbonebot",
                        "id": 123,
                        "node_id": "MDQ6VXNlcjg1MjU0NjY2",
                        "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/greenbonebot",
                        "html_url": "https://github.com/greenbonebot",
                        "followers_url": "https://api.github.com/users/greenbonebot/followers",
                        "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
                        "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
                        "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
                        "repos_url": "https://api.github.com/users/greenbonebot/repos",
                        "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
                        "type": "User",
                        "site_admin": False,
                    }
                ],
                "teams": [
                    {
                        "name": "devops",
                        "id": 123,
                        "node_id": "T_kwDOAegUqc4AUtL1",
                        "slug": "devops",
                        "description": "Team responsible for DevOps",
                        "privacy": "closed",
                        "url": "https://api.github.com/organizations/321/team/123",
                        "html_url": "https://github.com/orgs/foo/teams/devops",
                        "members_url": "https://api.github.com/organizations/321/team/123/members{/member}",
                        "repositories_url": "https://api.github.com/organizations/321/team/123/repos",
                        "permission": "pull",
                        "parent": None,
                    }
                ],
                "apps": [],
            },
        }

        reviews = RequiredPullRequestReviews.from_dict(data)

        self.assertEqual(
            reviews.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_pull_request_reviews",
        )
        # All three review toggles are enabled in this payload.
        for flag in (
            "dismiss_stale_reviews",
            "require_code_owner_reviews",
            "require_last_push_approval",
        ):
            self.assertTrue(getattr(reviews, flag))
        self.assertEqual(reviews.required_approving_review_count, 1)

        restrictions = reviews.dismissal_restrictions
        expected_urls = {
            "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions",
            "users_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/users",
            "teams_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/teams",
        }
        for attribute, expected in expected_urls.items():
            self.assertEqual(getattr(restrictions, attribute), expected)
        self.assertEqual(restrictions.apps, [])

        # A single user may dismiss reviews; check all of its attributes.
        self.assertEqual(len(restrictions.users), 1)
        user = restrictions.users[0]
        expected_user_attributes = {
            "login": "greenbonebot",
            "node_id": "MDQ6VXNlcjg1MjU0NjY2",
            "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
            "gravatar_id": "",
            "url": "https://api.github.com/users/greenbonebot",
            "html_url": "https://github.com/greenbonebot",
            "followers_url": "https://api.github.com/users/greenbonebot/followers",
            "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
            "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
            "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
            "repos_url": "https://api.github.com/users/greenbonebot/repos",
            "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
            "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
            "type": "User",
            "site_admin": False,
        }
        for attribute, expected in expected_user_attributes.items():
            self.assertEqual(getattr(user, attribute), expected)

        # A single team may dismiss reviews; enum fields are converted.
        self.assertEqual(len(restrictions.teams), 1)
        team = restrictions.teams[0]
        expected_team_attributes = {
            "name": "devops",
            "id": 123,
            "node_id": "T_kwDOAegUqc4AUtL1",
            "slug": "devops",
            "description": "Team responsible for DevOps",
            "permission": Permission.PULL,
            "privacy": TeamPrivacy.CLOSED,
            "url": "https://api.github.com/organizations/321/team/123",
            "html_url": "https://github.com/orgs/foo/teams/devops",
            "members_url": "https://api.github.com/organizations/321/team/123/members{/member}",
            "repositories_url": "https://api.github.com/organizations/321/team/123/repos",
        }
        for attribute, expected in expected_team_attributes.items():
            self.assertEqual(getattr(team, attribute), expected)
        self.assertIsNone(team.parent)


class BranchProtectionTestCase(unittest.TestCase):
    def test_from_dict_minimal(self):
        """A payload without optional sections yields None for those fields.

        Status checks, pull request review rules and push restrictions
        are absent here, so the corresponding model attributes must be
        None while all boolean toggles are still parsed.
        """
        data = {
            "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection",
            "required_signatures": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_signatures",
                "enabled": False,
            },
            "enforce_admins": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/enforce_admins",
                "enabled": False,
            },
            "required_linear_history": {"enabled": False},
            "allow_force_pushes": {"enabled": False},
            "allow_deletions": {"enabled": False},
            "block_creations": {"enabled": False},
            "required_conversation_resolution": {"enabled": False},
            "lock_branch": {"enabled": True},
            "allow_fork_syncing": {"enabled": False},
        }

        protection = BranchProtection.from_dict(data)

        self.assertEqual(
            protection.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection",
        )
        self.assertEqual(
            protection.required_signatures.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_signatures",
        )
        self.assertFalse(protection.required_signatures.enabled)
        self.assertEqual(
            protection.enforce_admins.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/enforce_admins",
        )
        self.assertFalse(protection.enforce_admins.enabled)

        # Only lock_branch is switched on in this payload.
        for name, enabled in (
            ("required_linear_history", False),
            ("allow_force_pushes", False),
            ("allow_deletions", False),
            ("block_creations", False),
            ("required_conversation_resolution", False),
            ("lock_branch", True),
            ("allow_fork_syncing", False),
        ):
            check = self.assertTrue if enabled else self.assertFalse
            check(getattr(protection, name).enabled)

        # Sections missing from the payload must end up as None.
        for absent in (
            "required_status_checks",
            "required_pull_request_reviews",
            "restrictions",
        ):
            self.assertIsNone(getattr(protection, absent))

    def test_from_dict(self):
        data = {
            "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection",
            "required_status_checks": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_status_checks",
                "strict": True,
                "checks": [],
            },
            "restrictions": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions",
                "users_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/users",
                "teams_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/teams",
                "apps_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/apps",
                "users": [
                    {
                        "login": "greenbonebot",
                        "id": 123,
                        "node_id": "MDQ6VXNlcjg1MjU0NjY2",
                        "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/greenbonebot",
                        "html_url": "https://github.com/greenbonebot",
                        "followers_url": "https://api.github.com/users/greenbonebot/followers",
                        "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
                        "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
                        "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
                        "repos_url": "https://api.github.com/users/greenbonebot/repos",
                        "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
                        "type": "User",
                        "site_admin": False,
                    }
                ],
                "teams": [],
                "apps": [],
            },
            "required_pull_request_reviews": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_pull_request_reviews",
                "dismiss_stale_reviews": True,
                "require_code_owner_reviews": True,
                "require_last_push_approval": True,
                "required_approving_review_count": 1,
                "dismissal_restrictions": {
                    "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions",
                    "users_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/users",
                    "teams_url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/teams",
                    "users": [
                        {
                            "login": "greenbonebot",
                            "id": 123,
                            "node_id": "MDQ6VXNlcjg1MjU0NjY2",
                            "avatar_url": "https://avatars.githubusercontent.com/u/85254666?v=4",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/greenbonebot",
                            "html_url": "https://github.com/greenbonebot",
                            "followers_url": "https://api.github.com/users/greenbonebot/followers",
                            "following_url": "https://api.github.com/users/greenbonebot/following{/other_user}",
                            "gists_url": "https://api.github.com/users/greenbonebot/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/greenbonebot/subscriptions",
                            "organizations_url": "https://api.github.com/users/greenbonebot/orgs",
                            "repos_url": "https://api.github.com/users/greenbonebot/repos",
                            "events_url": "https://api.github.com/users/greenbonebot/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/greenbonebot/received_events",
                            "type": "User",
                            "site_admin": False,
                        }
                    ],
                    "teams": [
                        {
                            "name": "devops",
                            "id": 123,
                            "node_id": "T_kwDOAegUqc4AUtL1",
                            "slug": "devops",
                            "description": "Team responsible for DevOps",
                            "privacy": "closed",
                            "url": "https://api.github.com/organizations/321/team/123",
                            "html_url": "https://github.com/orgs/foo/teams/devops",
                            "members_url": "https://api.github.com/organizations/321/team/123/members{/member}",
                            "repositories_url": "https://api.github.com/organizations/321/team/123/repos",
                            "permission": "pull",
                            "parent": None,
                        }
                    ],
                    "apps": [],
                },
            },
            "required_signatures": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_signatures",
                "enabled": True,
            },
            "enforce_admins": {
                "url": "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/enforce_admins",
                "enabled": True,
            },
            "required_linear_history": {"enabled": True},
            "allow_force_pushes": {"enabled": False},
            "allow_deletions": {"enabled": False},
            "block_creations": {"enabled": True},
            "required_conversation_resolution": {"enabled": True},
            "lock_branch": {"enabled": True},
            "allow_fork_syncing": {"enabled": False},
        }

        protection = BranchProtection.from_dict(data)

        self.assertEqual(
            protection.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection",
        )
        self.assertEqual(
            protection.required_signatures.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_signatures",
        )
        self.assertTrue(protection.required_signatures.enabled)
        self.assertEqual(
            protection.enforce_admins.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/enforce_admins",
        )
        self.assertTrue(protection.enforce_admins.enabled)
        self.assertTrue(protection.required_linear_history.enabled)
        self.assertFalse(protection.allow_force_pushes.enabled)
        self.assertFalse(protection.allow_deletions.enabled)
        self.assertTrue(protection.block_creations.enabled)
        self.assertTrue(protection.required_conversation_resolution.enabled)
        self.assertTrue(protection.lock_branch.enabled)
        self.assertFalse(protection.allow_fork_syncing.enabled)

        self.assertEqual(
            protection.required_status_checks.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_status_checks",
        )
        self.assertTrue(protection.required_status_checks.strict)
        self.assertEqual(protection.required_status_checks.checks, [])

        self.assertEqual(
            protection.required_pull_request_reviews.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/required_pull_request_reviews",
        )
        self.assertTrue(
            protection.required_pull_request_reviews.dismiss_stale_reviews
        )
        self.assertTrue(
            protection.required_pull_request_reviews.require_code_owner_reviews
        )
        self.assertTrue(
            protection.required_pull_request_reviews.require_last_push_approval
        )
        self.assertEqual(
            protection.required_pull_request_reviews.required_approving_review_count,
            1,
        )
        dismissal_restrictions = (
            protection.required_pull_request_reviews.dismissal_restrictions
        )
        self.assertEqual(
            dismissal_restrictions.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions",
        )
        self.assertEqual(
            dismissal_restrictions.users_url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/users",
        )
        self.assertEqual(
            dismissal_restrictions.teams_url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/dismissal_restrictions/teams",
        )
        self.assertEqual(dismissal_restrictions.apps, [])
        user = dismissal_restrictions.users[0]
        self.assertEqual(len(dismissal_restrictions.users), 1)
        self.assertEqual(user.login, "greenbonebot")
        self.assertEqual(user.node_id, "MDQ6VXNlcjg1MjU0NjY2")
        self.assertEqual(
            user.avatar_url,
            "https://avatars.githubusercontent.com/u/85254666?v=4",
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/greenbonebot")
        self.assertEqual(user.html_url, "https://github.com/greenbonebot")
        self.assertEqual(
            user.followers_url,
            "https://api.github.com/users/greenbonebot/followers",
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/greenbonebot/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/greenbonebot/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/greenbonebot/subscriptions",
        )
        self.assertEqual(
            user.organizations_url,
            "https://api.github.com/users/greenbonebot/orgs",
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/greenbonebot/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/greenbonebot/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/greenbonebot/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertEqual(user.site_admin, False)
        self.assertEqual(len(dismissal_restrictions.teams), 1)
        team = dismissal_restrictions.teams[0]
        self.assertEqual(team.name, "devops")
        self.assertEqual(team.id, 123)
        self.assertEqual(team.node_id, "T_kwDOAegUqc4AUtL1")
        self.assertEqual(team.slug, "devops")
        self.assertEqual(team.description, "Team responsible for DevOps")
        self.assertEqual(team.permission, Permission.PULL)
        self.assertEqual(team.privacy, TeamPrivacy.CLOSED)
        self.assertEqual(
            team.url, "https://api.github.com/organizations/321/team/123"
        )
        self.assertEqual(
            team.html_url, "https://github.com/orgs/foo/teams/devops"
        )
        self.assertEqual(
            team.members_url,
            "https://api.github.com/organizations/321/team/123/members{/member}",
        )
        self.assertEqual(
            team.repositories_url,
            "https://api.github.com/organizations/321/team/123/repos",
        )
        self.assertIsNone(team.parent)

        self.assertEqual(
            protection.restrictions.url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions",
        )
        self.assertEqual(
            protection.restrictions.users_url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/users",
        )
        self.assertEqual(
            protection.restrictions.teams_url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/teams",
        )
        self.assertEqual(
            protection.restrictions.apps_url,
            "https://api.github.com/repos/foo/bar/branches/branch_protection/protection/restrictions/apps",
        )
        user = protection.restrictions.users[0]
        self.assertEqual(len(protection.restrictions.users), 1)
        self.assertEqual(user.login, "greenbonebot")
        self.assertEqual(user.node_id, "MDQ6VXNlcjg1MjU0NjY2")
        self.assertEqual(
            user.avatar_url,
            "https://avatars.githubusercontent.com/u/85254666?v=4",
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/greenbonebot")
        self.assertEqual(user.html_url, "https://github.com/greenbonebot")
        self.assertEqual(
            user.followers_url,
            "https://api.github.com/users/greenbonebot/followers",
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/greenbonebot/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/greenbonebot/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/greenbonebot/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/greenbonebot/subscriptions",
        )
        self.assertEqual(
            user.organizations_url,
            "https://api.github.com/users/greenbonebot/orgs",
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/greenbonebot/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/greenbonebot/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/greenbonebot/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertEqual(user.site_admin, False)
        self.assertEqual(protection.restrictions.teams, [])
        self.assertEqual(protection.restrictions.apps, [])
pontos-25.3.2/tests/github/models/test_code_scanning.py000066400000000000000000000463051476255566300232720ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

import unittest
from datetime import datetime, timezone

from pontos.github.models.code_scanning import (
    AlertState,
    Analysis,
    CodeQLDatabase,
    CodeScanningAlert,
    DefaultSetup,
    DefaultSetupState,
    Instance,
    Language,
    Location,
    QuerySuite,
    Rule,
    SarifProcessingStatus,
    SarifUploadInformation,
    Severity,
    Tool,
)

# Fixture: a raw code scanning alert payload as returned by the GitHub
# REST API — a "dismissed" alert carrying dismissal metadata plus nested
# rule, tool, most_recent_instance and repository objects.
ALERT = {
    "number": 3,
    "created_at": "2020-02-13T12:29:18Z",
    "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/3",
    "html_url": "https://github.com/octocat/hello-world/code-scanning/3",
    "state": "dismissed",
    "dismissed_by": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "dismissed_at": "2020-02-14T12:29:18Z",
    "dismissed_reason": "false positive",
    "dismissed_comment": "This alert is not actually correct, because there's "
    "a sanitizer included in the library.",
    "rule": {
        "id": "js/zipslip",
        "severity": "error",
        "tags": ["security", "external/cwe/cwe-022"],
        "description": "Arbitrary file write during zip extraction",
        "name": "js/zipslip",
    },
    "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
    "most_recent_instance": {
        "ref": "refs/heads/main",
        "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
        "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
        "environment": "{}",
        "state": "open",
        "commit_sha": "39406e42cb832f683daa691dd652a8dc36ee8930",
        "message": {"text": "This path depends on a user-provided value."},
        "location": {
            "path": "lib/ab12-gen.js",
            "start_line": 917,
            "end_line": 917,
            "start_column": 7,
            "end_column": 18,
        },
        "classifications": [],
    },
    "instances_url": "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/3/instances",
    "repository": {
        "id": 1296269,
        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
        "name": "Hello-World",
        "full_name": "octocat/Hello-World",
        "owner": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "private": False,
        "html_url": "https://github.com/octocat/Hello-World",
        "description": "This your first repo!",
        "fork": False,
        "url": "https://api.github.com/repos/octocat/Hello-World",
        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
        "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
    },
}


class RuleTestCase(unittest.TestCase):
    def test_from_dict(self):
        """Rule.from_dict should map every field of a raw rule payload."""
        payload = {
            "id": "js/zipslip",
            "severity": "error",
            "tags": ["security", "external/cwe/cwe-022"],
            "description": "Arbitrary file write during zip extraction",
            "name": "js/zipslip",
        }

        rule = Rule.from_dict(payload)

        self.assertEqual(rule.id, "js/zipslip")
        self.assertEqual(rule.name, "js/zipslip")
        self.assertEqual(rule.severity, Severity.ERROR)
        self.assertEqual(
            rule.description, "Arbitrary file write during zip extraction"
        )
        # tags are kept as a plain list in payload order
        self.assertEqual(len(rule.tags), 2)
        self.assertEqual(rule.tags, ["security", "external/cwe/cwe-022"])


class LocationTestCase(unittest.TestCase):
    def test_from_dict(self):
        """Location.from_dict should map the path and all four coordinates."""
        payload = {
            "path": "lib/ab12-gen.js",
            "start_line": 917,
            "end_line": 917,
            "start_column": 7,
            "end_column": 18,
        }

        location = Location.from_dict(payload)

        self.assertEqual(location.path, "lib/ab12-gen.js")
        # compare the coordinate pairs as tuples
        self.assertEqual((location.start_line, location.end_line), (917, 917))
        self.assertEqual(
            (location.start_column, location.end_column), (7, 18)
        )


class InstanceTestCase(unittest.TestCase):
    def test_from_dict(self):
        """Instance.from_dict should deserialize a raw alert instance,
        including its message, nested location and (empty)
        classifications."""
        payload = {
            "ref": "refs/heads/main",
            "analysis_key": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
            "category": ".github/workflows/codeql-analysis.yml:CodeQL-Build",
            "environment": "{}",
            "state": "open",
            "commit_sha": "39406e42cb832f683daa691dd652a8dc36ee8930",
            "message": {
                "text": "This path depends on a user-provided value."
            },
            "location": {
                "path": "lib/ab12-gen.js",
                "start_line": 917,
                "end_line": 917,
                "start_column": 7,
                "end_column": 18,
            },
            "classifications": [],
        }

        instance = Instance.from_dict(payload)

        # analysis_key and category carry the same workflow identifier
        workflow_key = ".github/workflows/codeql-analysis.yml:CodeQL-Build"
        self.assertEqual(instance.ref, "refs/heads/main")
        self.assertEqual(instance.analysis_key, workflow_key)
        self.assertEqual(instance.category, workflow_key)
        self.assertEqual(instance.environment, "{}")
        self.assertEqual(instance.state, AlertState.OPEN)
        self.assertEqual(
            instance.commit_sha, "39406e42cb832f683daa691dd652a8dc36ee8930"
        )
        self.assertEqual(
            instance.message.text, "This path depends on a user-provided value."
        )

        # nested location object
        loc = instance.location
        self.assertEqual(loc.path, "lib/ab12-gen.js")
        self.assertEqual((loc.start_line, loc.end_line), (917, 917))
        self.assertEqual((loc.start_column, loc.end_column), (7, 18))

        self.assertEqual(len(instance.classifications), 0)


class ToolTestCase(unittest.TestCase):
    def test_from_dict(self):
        """Tool.from_dict should map name and version; guid stays None."""
        tool = Tool.from_dict(
            {"name": "CodeQL", "version": "2.4.0", "guid": None}
        )

        self.assertIsNone(tool.guid)
        self.assertEqual(tool.version, "2.4.0")
        self.assertEqual(tool.name, "CodeQL")


class CodeScanningAlertTestCase(unittest.TestCase):
    def test_from_dict(self):
        """CodeScanningAlert.from_dict should map the module-level ALERT
        fixture, including dismissal metadata and nested objects."""
        alert = CodeScanningAlert.from_dict(ALERT)

        self.assertEqual(alert.number, 3)
        self.assertEqual(alert.state, AlertState.DISMISSED)
        self.assertEqual(
            alert.created_at,
            datetime(2020, 2, 13, 12, 29, 18, tzinfo=timezone.utc),
        )
        self.assertEqual(
            alert.url,
            "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/3",
        )
        self.assertEqual(
            alert.html_url,
            "https://github.com/octocat/hello-world/code-scanning/3",
        )
        self.assertEqual(
            alert.instances_url,
            "https://api.github.com/repos/octocat/hello-world/code-scanning/alerts/3/instances",
        )

        # dismissal metadata
        self.assertEqual(
            alert.dismissed_at,
            datetime(2020, 2, 14, 12, 29, 18, tzinfo=timezone.utc),
        )
        self.assertEqual(alert.dismissed_by.login, "octocat")
        self.assertEqual(
            alert.dismissed_comment,
            "This alert is not actually correct, because there's "
            "a sanitizer included in the library.",
        )

        # nested objects
        self.assertEqual(alert.rule.id, "js/zipslip")
        self.assertEqual(alert.tool.name, "CodeQL")
        self.assertEqual(alert.most_recent_instance.ref, "refs/heads/main")
        self.assertEqual(
            alert.most_recent_instance.location.path, "lib/ab12-gen.js"
        )
        self.assertEqual(alert.repository.id, 1296269)


class AnalysisTestCase(unittest.TestCase):
    def test_from_dict(self):
        """Analysis.from_dict should map a full code scanning analysis."""
        payload = {
            "ref": "refs/heads/main",
            "commit_sha": "d99612c3e1f2970085cfbaeadf8f010ef69bad83",
            "analysis_key": ".github/workflows/codeql-analysis.yml:analyze",
            "environment": '{"language":"python"}',
            "error": "",
            "category": ".github/workflows/codeql-analysis.yml:analyze/language:python",
            "created_at": "2020-08-27T15:05:21Z",
            "results_count": 17,
            "rules_count": 49,
            "id": 201,
            "url": "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/201",
            "sarif_id": "6c81cd8e-b078-4ac3-a3be-1dad7dbd0b53",
            "tool": {"name": "CodeQL", "guid": None, "version": "2.4.0"},
            "deletable": True,
            "warning": "",
        }

        analysis = Analysis.from_dict(payload)

        self.assertEqual(analysis.ref, "refs/heads/main")
        self.assertEqual(
            analysis.commit_sha, "d99612c3e1f2970085cfbaeadf8f010ef69bad83"
        )
        self.assertEqual(
            analysis.analysis_key,
            ".github/workflows/codeql-analysis.yml:analyze",
        )
        self.assertEqual(
            analysis.category,
            ".github/workflows/codeql-analysis.yml:analyze/language:python",
        )
        self.assertEqual(analysis.environment, '{"language":"python"}')
        # error and warning are present but empty in this payload
        self.assertEqual(analysis.error, "")
        self.assertEqual(analysis.warning, "")
        self.assertEqual(
            analysis.created_at,
            datetime(2020, 8, 27, 15, 5, 21, tzinfo=timezone.utc),
        )
        self.assertEqual(analysis.results_count, 17)
        self.assertEqual(analysis.rules_count, 49)
        self.assertEqual(analysis.id, 201)
        self.assertEqual(
            analysis.url,
            "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses/201",
        )
        self.assertEqual(
            analysis.sarif_id, "6c81cd8e-b078-4ac3-a3be-1dad7dbd0b53"
        )
        self.assertTrue(analysis.deletable)

        # nested tool object
        tool = analysis.tool
        self.assertEqual(tool.name, "CodeQL")
        self.assertEqual(tool.version, "2.4.0")
        self.assertIsNone(tool.guid)


class CodeQLDatabaseTestCase(unittest.TestCase):
    def test_from_dict(self):
        """CodeQLDatabase.from_dict should map a CodeQL database payload."""
        uploader = {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        }
        payload = {
            "id": 1,
            "name": "database.zip",
            "language": "java",
            "uploader": uploader,
            "content_type": "application/zip",
            "size": 1024,
            "created_at": "2022-09-12T12:14:32Z",
            "updated_at": "2022-09-12T12:14:32Z",
            "url": "https://api.github.com/repos/octocat/Hello-World/code-scanning/codeql/databases/java",
            "commit_oid": "12345678901234567000",
        }

        db = CodeQLDatabase.from_dict(payload)

        # created_at and updated_at hold the same instant in this payload
        expected_timestamp = datetime(
            2022, 9, 12, 12, 14, 32, tzinfo=timezone.utc
        )
        self.assertEqual(db.id, 1)
        self.assertEqual(db.name, "database.zip")
        self.assertEqual(db.language, "java")
        self.assertEqual(db.uploader.id, 1)
        self.assertEqual(db.content_type, "application/zip")
        self.assertEqual(db.size, 1024)
        self.assertEqual(db.created_at, expected_timestamp)
        self.assertEqual(db.updated_at, expected_timestamp)
        self.assertEqual(
            db.url,
            "https://api.github.com/repos/octocat/Hello-World/code-scanning/codeql/databases/java",
        )
        self.assertEqual(db.commit_oid, "12345678901234567000")


class DefaultSetupTestCase(unittest.TestCase):
    def test_from_dict(self):
        """DefaultSetup.from_dict should parse a default-setup payload,
        converting state, languages and query suite to their enums."""
        payload = {
            "state": "configured",
            "languages": ["ruby", "python"],
            "query_suite": "default",
            "updated_at": "2023-01-19T11:21:34Z",
            "schedule": "weekly",
        }

        setup = DefaultSetup.from_dict(payload)

        self.assertEqual(setup.state, DefaultSetupState.CONFIGURED)
        self.assertEqual(setup.languages, [Language.RUBY, Language.PYTHON])
        self.assertEqual(setup.query_suite, QuerySuite.DEFAULT)
        self.assertEqual(setup.schedule, "weekly")
        self.assertEqual(
            setup.updated_at,
            datetime(2023, 1, 19, 11, 21, 34, tzinfo=timezone.utc),
        )


class SarifUploadInformationTestCase(unittest.TestCase):
    def test_from_dict(self):
        """SarifUploadInformation.from_dict should parse the processing
        status and analyses URL; errors default to None."""
        analyses_url = "https://api.github.com/repos/octocat/hello-world/code-scanning/analyses?sarif_id=47177e22-5596-11eb-80a1-c1e54ef945c6"

        sarif = SarifUploadInformation.from_dict(
            {
                "processing_status": "complete",
                "analyses_url": analyses_url,
            }
        )

        self.assertIsNone(sarif.errors)
        self.assertEqual(
            sarif.processing_status, SarifProcessingStatus.COMPLETE
        )
        self.assertEqual(sarif.analyses_url, analyses_url)
pontos-25.3.2/tests/github/models/test_dependabot.py000066400000000000000000000510431476255566300226000ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

import unittest
from datetime import datetime, timezone

from pontos.github.models.dependabot import (
    AlertState,
    DependabotAlert,
    DependencyScope,
    DismissedReason,
    IdentifierType,
    SecurityAdvisory,
    Severity,
    Vulnerability,
    VulnerablePackage,
)


class VulnerablePackageTestCase(unittest.TestCase):
    def test_from_dict(self):
        """VulnerablePackage.from_dict should map ecosystem and name."""
        package = VulnerablePackage.from_dict(
            {"name": "django", "ecosystem": "pip"}
        )

        self.assertEqual(package.name, "django")
        self.assertEqual(package.ecosystem, "pip")


class VulnerabilityTestCase(unittest.TestCase):
    def test_from_dict(self):
        """Vulnerability.from_dict should parse payloads with and without
        the optional ``first_patched_version`` entry.

        Fix: the second scenario previously did not assert
        ``vulnerable_version_range`` even though the payload provides it.
        """
        vulnerability = Vulnerability.from_dict(
            {
                "package": {
                    "ecosystem": "pip",
                    "name": "django",
                },
                "severity": "high",
                "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
                "first_patched_version": {"identifier": "2.0.2"},
            }
        )

        self.assertEqual(vulnerability.package.ecosystem, "pip")
        self.assertEqual(vulnerability.package.name, "django")
        self.assertEqual(vulnerability.severity, Severity.HIGH)
        self.assertEqual(
            vulnerability.vulnerable_version_range, ">= 2.0.0, < 2.0.2"
        )
        self.assertEqual(
            vulnerability.first_patched_version.identifier, "2.0.2"
        )

        # first_patched_version is optional and may be absent
        vulnerability = Vulnerability.from_dict(
            {
                "package": {
                    "ecosystem": "pip",
                    "name": "django",
                },
                "severity": "high",
                "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
            }
        )

        self.assertEqual(vulnerability.package.ecosystem, "pip")
        self.assertEqual(vulnerability.package.name, "django")
        self.assertEqual(vulnerability.severity, Severity.HIGH)
        self.assertEqual(
            vulnerability.vulnerable_version_range, ">= 2.0.0, < 2.0.2"
        )
        self.assertIsNone(vulnerability.first_patched_version)


# Fixture: a raw Dependabot security advisory payload as returned by the
# GitHub REST API (CVE-2018-6188 for Django), with vulnerabilities,
# CVSS, CWEs, identifiers and references.
SECURITY_ADVISORY = {
    "ghsa_id": "GHSA-rf4j-j272-fj86",
    "cve_id": "CVE-2018-6188",
    "summary": "Django allows remote attackers to obtain potentially sensitive information by leveraging data exposure from the confirm_login_allowed() method, as demonstrated by discovering whether a user account is inactive",
    "description": "django.contrib.auth.forms.AuthenticationForm in Django 2.0 before 2.0.2, and 1.11.8 and 1.11.9, allows remote attackers to obtain potentially sensitive information by leveraging data exposure from the confirm_login_allowed() method, as demonstrated by discovering whether a user account is inactive.",
    "vulnerabilities": [
        {
            "package": {"ecosystem": "pip", "name": "django"},
            "severity": "high",
            "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
            "first_patched_version": {"identifier": "2.0.2"},
        },
        {
            "package": {"ecosystem": "pip", "name": "django"},
            "severity": "high",
            "vulnerable_version_range": ">= 1.11.8, < 1.11.10",
            "first_patched_version": {"identifier": "1.11.10"},
        },
    ],
    "severity": "high",
    "cvss": {
        "vector_string": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
        "score": 7.5,
    },
    "cwes": [
        {
            "cwe_id": "CWE-200",
            "name": "Exposure of Sensitive Information to an Unauthorized Actor",
        }
    ],
    "identifiers": [
        {"type": "GHSA", "value": "GHSA-rf4j-j272-fj86"},
        {"type": "CVE", "value": "CVE-2018-6188"},
    ],
    "references": [
        {"url": "https://nvd.nist.gov/vuln/detail/CVE-2018-6188"},
        {"url": "https://github.com/advisories/GHSA-rf4j-j272-fj86"},
        {"url": "https://usn.ubuntu.com/3559-1/"},
        {
            "url": "https://www.djangoproject.com/weblog/2018/feb/01/security-releases/"
        },
        {"url": "http://www.securitytracker.com/id/1040422"},
    ],
    "published_at": "2018-10-03T21:13:54Z",
    "updated_at": "2022-04-26T18:35:37Z",
    "withdrawn_at": None,
}


class SecurityAdvisoryTestCase(unittest.TestCase):
    """Tests for deserializing a GitHub security advisory payload."""

    def test_from_dict(self):
        """SecurityAdvisory.from_dict must map every field of the payload."""
        advisory = SecurityAdvisory.from_dict(SECURITY_ADVISORY)

        self.assertEqual(advisory.cve_id, "CVE-2018-6188")
        self.assertEqual(advisory.ghsa_id, "GHSA-rf4j-j272-fj86")
        self.assertEqual(
            advisory.cvss.vector_string,
            "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
        )
        self.assertEqual(advisory.cvss.score, 7.5)
        self.assertRegex(advisory.summary, "^Django allows remote attacker.*")
        # raw string required: "\." in a non-raw literal is an invalid
        # escape sequence (SyntaxWarning since Python 3.12)
        self.assertRegex(
            advisory.description,
            r"^django\.contrib\.auth\.forms\.AuthenticationForm in Django 2\.0.*",
        )

        # both vulnerable version ranges of the advisory must be mapped
        self.assertEqual(len(advisory.vulnerabilities), 2)
        vuln = advisory.vulnerabilities[0]
        self.assertEqual(vuln.package.ecosystem, "pip")
        self.assertEqual(vuln.package.name, "django")
        self.assertEqual(vuln.severity, Severity.HIGH)
        self.assertEqual(vuln.vulnerable_version_range, ">= 2.0.0, < 2.0.2")
        self.assertEqual(vuln.first_patched_version.identifier, "2.0.2")

        vuln = advisory.vulnerabilities[1]
        self.assertEqual(vuln.package.ecosystem, "pip")
        self.assertEqual(vuln.package.name, "django")
        self.assertEqual(vuln.severity, Severity.HIGH)
        self.assertEqual(vuln.vulnerable_version_range, ">= 1.11.8, < 1.11.10")
        self.assertEqual(vuln.first_patched_version.identifier, "1.11.10")

        self.assertEqual(len(advisory.cwes), 1)
        cwe = advisory.cwes[0]
        self.assertEqual(cwe.cwe_id, "CWE-200")
        self.assertEqual(
            cwe.name,
            "Exposure of Sensitive Information to an Unauthorized Actor",
        )

        # string "type" values must be converted into IdentifierType enums
        self.assertEqual(len(advisory.identifiers), 2)
        identifier = advisory.identifiers[0]
        self.assertEqual(identifier.type, IdentifierType.GHSA)
        self.assertEqual(identifier.value, "GHSA-rf4j-j272-fj86")

        identifier = advisory.identifiers[1]
        self.assertEqual(identifier.type, IdentifierType.CVE)
        self.assertEqual(identifier.value, "CVE-2018-6188")

        self.assertEqual(len(advisory.references), 5)
        self.assertEqual(
            advisory.references[0].url,
            "https://nvd.nist.gov/vuln/detail/CVE-2018-6188",
        )

        # ISO 8601 "Z" timestamps must become timezone-aware UTC datetimes
        self.assertEqual(
            advisory.published_at,
            datetime(2018, 10, 3, 21, 13, 54, tzinfo=timezone.utc),
        )
        self.assertEqual(
            advisory.updated_at,
            datetime(2022, 4, 26, 18, 35, 37, tzinfo=timezone.utc),
        )

        self.assertIsNone(advisory.withdrawn_at)


# Example Dependabot alert payload as returned by the GitHub REST API
# (GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}).
# Serves as the fixture input for DependabotAlertTestCase.from_dict tests.
DEPENDABOT_ALERT = {
    "number": 2,
    "state": "dismissed",
    "dependency": {
        "package": {"ecosystem": "pip", "name": "django"},
        "manifest_path": "path/to/requirements.txt",
        "scope": "runtime",
    },
    "security_advisory": {
        "ghsa_id": "GHSA-rf4j-j272-fj86",
        "cve_id": "CVE-2018-6188",
        "summary": "Django allows remote attackers to obtain potentially sensitive information by leveraging data exposure from the confirm_login_allowed() method, as demonstrated by discovering whether a user account is inactive",
        "description": "django.contrib.auth.forms.AuthenticationForm in Django 2.0 before 2.0.2, and 1.11.8 and 1.11.9, allows remote attackers to obtain potentially sensitive information by leveraging data exposure from the confirm_login_allowed() method, as demonstrated by discovering whether a user account is inactive.",
        "vulnerabilities": [
            {
                "package": {"ecosystem": "pip", "name": "django"},
                "severity": "high",
                "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
                "first_patched_version": {"identifier": "2.0.2"},
            },
            {
                "package": {"ecosystem": "pip", "name": "django"},
                "severity": "high",
                "vulnerable_version_range": ">= 1.11.8, < 1.11.10",
                "first_patched_version": {"identifier": "1.11.10"},
            },
        ],
        "severity": "high",
        "cvss": {
            "vector_string": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
            "score": 7.5,
        },
        "cwes": [
            {
                "cwe_id": "CWE-200",
                "name": "Exposure of Sensitive Information to an Unauthorized Actor",
            }
        ],
        "identifiers": [
            {"type": "GHSA", "value": "GHSA-rf4j-j272-fj86"},
            {"type": "CVE", "value": "CVE-2018-6188"},
        ],
        "references": [
            {"url": "https://nvd.nist.gov/vuln/detail/CVE-2018-6188"},
            {"url": "https://github.com/advisories/GHSA-rf4j-j272-fj86"},
            {"url": "https://usn.ubuntu.com/3559-1/"},
            {
                "url": "https://www.djangoproject.com/weblog/2018/feb/01/security-releases/"
            },
            {"url": "http://www.securitytracker.com/id/1040422"},
        ],
        "published_at": "2018-10-03T21:13:54Z",
        "updated_at": "2022-04-26T18:35:37Z",
        "withdrawn_at": None,
    },
    "security_vulnerability": {
        "package": {"ecosystem": "pip", "name": "django"},
        "severity": "high",
        "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
        "first_patched_version": {"identifier": "2.0.2"},
    },
    "url": "https://api.github.com/repos/octo-org/octo-repo/dependabot/alerts/2",
    "html_url": "https://github.com/octo-org/octo-repo/security/dependabot/2",
    "created_at": "2022-06-15T07:43:03Z",
    "updated_at": "2022-08-23T14:29:47Z",
    "dismissed_at": "2022-08-23T14:29:47Z",
    "dismissed_by": {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    },
    "dismissed_reason": "tolerable_risk",
    "dismissed_comment": "This alert is accurate but we use a sanitizer.",
    "fixed_at": None,
    "repository": {
        "id": 217723378,
        "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
        "name": "octo-repo",
        "full_name": "octo-org/octo-repo",
        "owner": {
            "login": "octo-org",
            "id": 6811672,
            "node_id": "MDEyOk9yZ2FuaXphdGlvbjY4MTE2NzI=",
            "avatar_url": "https://avatars3.githubusercontent.com/u/6811672?v=4",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octo-org",
            "html_url": "https://github.com/octo-org",
            "followers_url": "https://api.github.com/users/octo-org/followers",
            "following_url": "https://api.github.com/users/octo-org/following{/other_user}",
            "gists_url": "https://api.github.com/users/octo-org/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octo-org/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octo-org/subscriptions",
            "organizations_url": "https://api.github.com/users/octo-org/orgs",
            "repos_url": "https://api.github.com/users/octo-org/repos",
            "events_url": "https://api.github.com/users/octo-org/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octo-org/received_events",
            "type": "Organization",
            "site_admin": False,
        },
        "private": True,
        "html_url": "https://github.com/octo-org/octo-repo",
        "description": None,
        "fork": False,
        "url": "https://api.github.com/repos/octo-org/octo-repo",
        "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
        "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
        "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
        "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
        "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
        "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
        "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
        "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
        "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
        "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
        "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
        "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
        "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
        "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
        "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
        "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
        "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
        "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
        "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
        "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
        "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
        "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
        "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
        "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
    },
}


class DependabotAlertTestCase(unittest.TestCase):
    """Tests for deserializing a GitHub Dependabot alert payload."""

    def test_from_dict(self):
        """DependabotAlert.from_dict must map every field of the payload."""
        alert = DependabotAlert.from_dict(DEPENDABOT_ALERT)

        self.assertEqual(alert.number, 2)
        self.assertEqual(alert.state, AlertState.DISMISSED)

        # dependency: package, manifest and string scope -> enum
        self.assertEqual(alert.dependency.package.ecosystem, "pip")
        self.assertEqual(alert.dependency.package.name, "django")
        self.assertEqual(
            alert.dependency.manifest_path, "path/to/requirements.txt"
        )
        self.assertEqual(alert.dependency.scope, DependencyScope.RUNTIME)

        # nested security advisory
        self.assertEqual(alert.security_advisory.cve_id, "CVE-2018-6188")
        self.assertEqual(alert.security_advisory.ghsa_id, "GHSA-rf4j-j272-fj86")
        self.assertEqual(
            alert.security_advisory.cvss.vector_string,
            "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
        )
        self.assertEqual(alert.security_advisory.cvss.score, 7.5)
        self.assertRegex(
            alert.security_advisory.summary, "^Django allows remote attacker.*"
        )
        # raw string required: "\." in a non-raw literal is an invalid
        # escape sequence (SyntaxWarning since Python 3.12)
        self.assertRegex(
            alert.security_advisory.description,
            r"^django\.contrib\.auth\.forms\.AuthenticationForm in Django 2\.0.*",
        )

        self.assertEqual(len(alert.security_advisory.vulnerabilities), 2)
        vuln = alert.security_advisory.vulnerabilities[0]
        self.assertEqual(vuln.package.ecosystem, "pip")
        self.assertEqual(vuln.package.name, "django")
        self.assertEqual(vuln.severity, Severity.HIGH)
        self.assertEqual(vuln.vulnerable_version_range, ">= 2.0.0, < 2.0.2")
        self.assertEqual(vuln.first_patched_version.identifier, "2.0.2")

        vuln = alert.security_advisory.vulnerabilities[1]
        self.assertEqual(vuln.package.ecosystem, "pip")
        self.assertEqual(vuln.package.name, "django")
        self.assertEqual(vuln.severity, Severity.HIGH)
        self.assertEqual(vuln.vulnerable_version_range, ">= 1.11.8, < 1.11.10")
        self.assertEqual(vuln.first_patched_version.identifier, "1.11.10")

        self.assertEqual(len(alert.security_advisory.cwes), 1)
        cwe = alert.security_advisory.cwes[0]
        self.assertEqual(cwe.cwe_id, "CWE-200")
        self.assertEqual(
            cwe.name,
            "Exposure of Sensitive Information to an Unauthorized Actor",
        )

        self.assertEqual(len(alert.security_advisory.identifiers), 2)
        identifier = alert.security_advisory.identifiers[0]
        self.assertEqual(identifier.type, IdentifierType.GHSA)
        self.assertEqual(identifier.value, "GHSA-rf4j-j272-fj86")

        identifier = alert.security_advisory.identifiers[1]
        self.assertEqual(identifier.type, IdentifierType.CVE)
        self.assertEqual(identifier.value, "CVE-2018-6188")

        self.assertEqual(len(alert.security_advisory.references), 5)
        self.assertEqual(
            alert.security_advisory.references[0].url,
            "https://nvd.nist.gov/vuln/detail/CVE-2018-6188",
        )

        # ISO 8601 "Z" timestamps must become timezone-aware UTC datetimes
        self.assertEqual(
            alert.security_advisory.published_at,
            datetime(2018, 10, 3, 21, 13, 54, tzinfo=timezone.utc),
        )
        self.assertEqual(
            alert.security_advisory.updated_at,
            datetime(2022, 4, 26, 18, 35, 37, tzinfo=timezone.utc),
        )

        self.assertIsNone(alert.security_advisory.withdrawn_at)

        # top-level security_vulnerability (first matching range)
        self.assertEqual(alert.security_vulnerability.package.ecosystem, "pip")
        self.assertEqual(alert.security_vulnerability.package.name, "django")
        self.assertEqual(
            alert.security_vulnerability.vulnerable_version_range,
            ">= 2.0.0, < 2.0.2",
        )
        self.assertEqual(
            alert.security_vulnerability.first_patched_version.identifier,
            "2.0.2",
        )
        self.assertEqual(alert.security_vulnerability.severity, Severity.HIGH)

        self.assertEqual(
            alert.url,
            "https://api.github.com/repos/octo-org/octo-repo/dependabot/alerts/2",
        )
        self.assertEqual(
            alert.html_url,
            "https://github.com/octo-org/octo-repo/security/dependabot/2",
        )

        self.assertEqual(
            alert.created_at,
            datetime(2022, 6, 15, 7, 43, 3, tzinfo=timezone.utc),
        )
        self.assertEqual(
            alert.updated_at,
            datetime(2022, 8, 23, 14, 29, 47, tzinfo=timezone.utc),
        )
        self.assertEqual(
            alert.dismissed_at,
            datetime(2022, 8, 23, 14, 29, 47, tzinfo=timezone.utc),
        )

        # dismissal metadata, including string reason -> enum
        self.assertEqual(alert.dismissed_by.login, "octocat")
        self.assertEqual(alert.dismissed_reason, DismissedReason.TOLERABLE_RISK)
        self.assertEqual(
            alert.dismissed_comment,
            "This alert is accurate but we use a sanitizer.",
        )

        self.assertIsNone(alert.fixed_at)
        # auto_dismissed_at is absent from the payload and must default to None
        self.assertIsNone(alert.auto_dismissed_at)

        self.assertEqual(alert.repository.id, 217723378)
pontos-25.3.2/tests/github/models/test_organization.py000066400000000000000000000455121476255566300232030ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long, redefined-builtin
# ruff: noqa: E501

import unittest
from datetime import datetime, timezone

from pontos.github.models.organization import License, Repository


class LicenseTestCase(unittest.TestCase):
    """Tests for deserializing GitHub repository license information."""

    def test_from_dict(self):
        """License.from_dict must map every field of the API payload."""
        data = {
            "key": "gpl-3.0",
            "name": "GNU General Public License v3.0",
            "spdx_id": "GPL-3.0",
            "url": "https://api.github.com/licenses/gpl-3.0",
            "node_id": "MDc6TGljZW5zZTk=",
        }

        # renamed from `license` to avoid shadowing the builtin
        gh_license = License.from_dict(data)

        self.assertEqual(gh_license.key, "gpl-3.0")
        self.assertEqual(gh_license.name, "GNU General Public License v3.0")
        self.assertEqual(gh_license.spdx_id, "GPL-3.0")
        self.assertEqual(
            gh_license.url, "https://api.github.com/licenses/gpl-3.0"
        )
        self.assertEqual(gh_license.node_id, "MDc6TGljZW5zZTk=")


class RepositoryTestCase(unittest.TestCase):
    def test_from_dict(self):
        data = {
            "id": 103647077,
            "node_id": "MDEwOlJlcG9zaXRvcnkxMDM2NDcwNzc=",
            "name": "gvm-tools",
            "full_name": "greenbone/gvm-tools",
            "private": False,
            "owner": {
                "login": "greenbone",
                "id": 31986857,
                "node_id": "MDEyOk9yZ2FuaXphdGlvbjMxOTg2ODU3",
                "avatar_url": "https://avatars.githubusercontent.com/u/31986857?v=4",
                "gravatar_id": "",
                "url": "https://api.github.com/users/greenbone",
                "html_url": "https://github.com/greenbone",
                "followers_url": "https://api.github.com/users/greenbone/followers",
                "following_url": "https://api.github.com/users/greenbone/following{/other_user}",
                "gists_url": "https://api.github.com/users/greenbone/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/greenbone/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/greenbone/subscriptions",
                "organizations_url": "https://api.github.com/users/greenbone/orgs",
                "repos_url": "https://api.github.com/users/greenbone/repos",
                "events_url": "https://api.github.com/users/greenbone/events{/privacy}",
                "received_events_url": "https://api.github.com/users/greenbone/received_events",
                "type": "Organization",
                "site_admin": False,
            },
            "html_url": "https://github.com/greenbone/gvm-tools",
            "description": "Remote control your Greenbone Community Edition or Greenbone Enterprise Appliance",
            "fork": False,
            "url": "https://api.github.com/repos/greenbone/gvm-tools",
            "forks_url": "https://api.github.com/repos/greenbone/gvm-tools/forks",
            "keys_url": "https://api.github.com/repos/greenbone/gvm-tools/keys{/key_id}",
            "collaborators_url": "https://api.github.com/repos/greenbone/gvm-tools/collaborators{/collaborator}",
            "teams_url": "https://api.github.com/repos/greenbone/gvm-tools/teams",
            "hooks_url": "https://api.github.com/repos/greenbone/gvm-tools/hooks",
            "issue_events_url": "https://api.github.com/repos/greenbone/gvm-tools/issues/events{/number}",
            "events_url": "https://api.github.com/repos/greenbone/gvm-tools/events",
            "assignees_url": "https://api.github.com/repos/greenbone/gvm-tools/assignees{/user}",
            "branches_url": "https://api.github.com/repos/greenbone/gvm-tools/branches{/branch}",
            "tags_url": "https://api.github.com/repos/greenbone/gvm-tools/tags",
            "blobs_url": "https://api.github.com/repos/greenbone/gvm-tools/git/blobs{/sha}",
            "git_tags_url": "https://api.github.com/repos/greenbone/gvm-tools/git/tags{/sha}",
            "git_refs_url": "https://api.github.com/repos/greenbone/gvm-tools/git/refs{/sha}",
            "trees_url": "https://api.github.com/repos/greenbone/gvm-tools/git/trees{/sha}",
            "statuses_url": "https://api.github.com/repos/greenbone/gvm-tools/statuses/{sha}",
            "languages_url": "https://api.github.com/repos/greenbone/gvm-tools/languages",
            "stargazers_url": "https://api.github.com/repos/greenbone/gvm-tools/stargazers",
            "contributors_url": "https://api.github.com/repos/greenbone/gvm-tools/contributors",
            "subscribers_url": "https://api.github.com/repos/greenbone/gvm-tools/subscribers",
            "subscription_url": "https://api.github.com/repos/greenbone/gvm-tools/subscription",
            "commits_url": "https://api.github.com/repos/greenbone/gvm-tools/commits{/sha}",
            "git_commits_url": "https://api.github.com/repos/greenbone/gvm-tools/git/commits{/sha}",
            "comments_url": "https://api.github.com/repos/greenbone/gvm-tools/comments{/number}",
            "issue_comment_url": "https://api.github.com/repos/greenbone/gvm-tools/issues/comments{/number}",
            "contents_url": "https://api.github.com/repos/greenbone/gvm-tools/contents/{+path}",
            "compare_url": "https://api.github.com/repos/greenbone/gvm-tools/compare/{base}...{head}",
            "merges_url": "https://api.github.com/repos/greenbone/gvm-tools/merges",
            "archive_url": "https://api.github.com/repos/greenbone/gvm-tools/{archive_format}{/ref}",
            "downloads_url": "https://api.github.com/repos/greenbone/gvm-tools/downloads",
            "issues_url": "https://api.github.com/repos/greenbone/gvm-tools/issues{/number}",
            "pulls_url": "https://api.github.com/repos/greenbone/gvm-tools/pulls{/number}",
            "milestones_url": "https://api.github.com/repos/greenbone/gvm-tools/milestones{/number}",
            "notifications_url": "https://api.github.com/repos/greenbone/gvm-tools/notifications{?since,all,participating}",
            "labels_url": "https://api.github.com/repos/greenbone/gvm-tools/labels{/name}",
            "releases_url": "https://api.github.com/repos/greenbone/gvm-tools/releases{/id}",
            "deployments_url": "https://api.github.com/repos/greenbone/gvm-tools/deployments",
            "created_at": "2017-09-15T10:54:42Z",
            "updated_at": "2022-11-01T07:45:33Z",
            "pushed_at": "2022-11-07T09:21:30Z",
            "git_url": "git://github.com/greenbone/gvm-tools.git",
            "ssh_url": "git@github.com:greenbone/gvm-tools.git",
            "clone_url": "https://github.com/greenbone/gvm-tools.git",
            "svn_url": "https://github.com/greenbone/gvm-tools",
            "homepage": "https://greenbone.github.io/gvm-tools/",
            "size": 3461,
            "stargazers_count": 128,
            "watchers_count": 128,
            "language": "Python",
            "has_issues": True,
            "has_projects": False,
            "has_downloads": True,
            "has_wiki": True,
            "has_pages": True,
            "has_discussions": False,
            "forks_count": 74,
            "mirror_url": None,
            "archived": False,
            "disabled": False,
            "open_issues_count": 3,
            "license": {
                "key": "gpl-3.0",
                "name": "GNU General Public License v3.0",
                "spdx_id": "GPL-3.0",
                "url": "https://api.github.com/licenses/gpl-3.0",
                "node_id": "MDc6TGljZW5zZTk=",
            },
            "allow_forking": True,
            "is_template": False,
            "web_commit_signoff_required": False,
            "topics": [
                "gmp",
                "gmp-scripts",
                "greenbone",
                "greenbone-vulnerability-manager",
                "gvm",
                "gvm-cli",
                "gvm-pyshell",
                "gvm-script",
                "omp",
                "openvas",
                "openvas-cli",
                "osp",
                "python",
                "vulnerability",
                "vulnerability-assessment",
                "vulnerability-management",
            ],
            "visibility": "public",
            "forks": 74,
            "open_issues": 3,
            "watchers": 128,
            "default_branch": "main",
        }

        repo = Repository.from_dict(data)

        self.assertEqual(repo.id, 103647077)
        self.assertEqual(repo.node_id, "MDEwOlJlcG9zaXRvcnkxMDM2NDcwNzc=")
        self.assertEqual(repo.name, "gvm-tools")
        self.assertEqual(repo.full_name, "greenbone/gvm-tools")
        self.assertEqual(repo.private, False)
        self.assertEqual(repo.owner.login, "greenbone")
        self.assertEqual(repo.owner.id, 31986857)
        self.assertEqual(repo.owner.node_id, "MDEyOk9yZ2FuaXphdGlvbjMxOTg2ODU3")
        self.assertEqual(
            repo.owner.avatar_url,
            "https://avatars.githubusercontent.com/u/31986857?v=4",
        )
        self.assertEqual(repo.owner.gravatar_id, "")
        self.assertEqual(
            repo.owner.url, "https://api.github.com/users/greenbone"
        )
        self.assertEqual(repo.owner.html_url, "https://github.com/greenbone")
        self.assertEqual(
            repo.owner.followers_url,
            "https://api.github.com/users/greenbone/followers",
        )
        self.assertEqual(
            repo.owner.following_url,
            "https://api.github.com/users/greenbone/following{/other_user}",
        )
        self.assertEqual(
            repo.owner.gists_url,
            "https://api.github.com/users/greenbone/gists{/gist_id}",
        )
        self.assertEqual(
            repo.owner.starred_url,
            "https://api.github.com/users/greenbone/starred{/owner}{/repo}",
        )
        self.assertEqual(
            repo.owner.subscriptions_url,
            "https://api.github.com/users/greenbone/subscriptions",
        )
        self.assertEqual(
            repo.owner.organizations_url,
            "https://api.github.com/users/greenbone/orgs",
        )
        self.assertEqual(
            repo.owner.repos_url, "https://api.github.com/users/greenbone/repos"
        )
        self.assertEqual(
            repo.owner.events_url,
            "https://api.github.com/users/greenbone/events{/privacy}",
        )
        self.assertEqual(
            repo.owner.received_events_url,
            "https://api.github.com/users/greenbone/received_events",
        )
        self.assertEqual(repo.owner.type, "Organization")
        self.assertFalse(repo.owner.site_admin)
        self.assertEqual(
            repo.html_url, "https://github.com/greenbone/gvm-tools"
        )
        self.assertEqual(
            repo.description,
            "Remote control your Greenbone Community Edition or Greenbone Enterprise Appliance",
        )
        self.assertEqual(repo.fork, False)
        self.assertEqual(
            repo.url, "https://api.github.com/repos/greenbone/gvm-tools"
        )
        self.assertEqual(
            repo.forks_url,
            "https://api.github.com/repos/greenbone/gvm-tools/forks",
        )
        self.assertEqual(
            repo.keys_url,
            "https://api.github.com/repos/greenbone/gvm-tools/keys{/key_id}",
        )
        self.assertEqual(
            repo.collaborators_url,
            "https://api.github.com/repos/greenbone/gvm-tools/collaborators{/collaborator}",
        )
        self.assertEqual(
            repo.teams_url,
            "https://api.github.com/repos/greenbone/gvm-tools/teams",
        )
        self.assertEqual(
            repo.hooks_url,
            "https://api.github.com/repos/greenbone/gvm-tools/hooks",
        )
        self.assertEqual(
            repo.issue_events_url,
            "https://api.github.com/repos/greenbone/gvm-tools/issues/events{/number}",
        )
        self.assertEqual(
            repo.events_url,
            "https://api.github.com/repos/greenbone/gvm-tools/events",
        )
        self.assertEqual(
            repo.assignees_url,
            "https://api.github.com/repos/greenbone/gvm-tools/assignees{/user}",
        )
        self.assertEqual(
            repo.branches_url,
            "https://api.github.com/repos/greenbone/gvm-tools/branches{/branch}",
        )
        self.assertEqual(
            repo.tags_url,
            "https://api.github.com/repos/greenbone/gvm-tools/tags",
        )
        self.assertEqual(
            repo.blobs_url,
            "https://api.github.com/repos/greenbone/gvm-tools/git/blobs{/sha}",
        )
        self.assertEqual(
            repo.git_tags_url,
            "https://api.github.com/repos/greenbone/gvm-tools/git/tags{/sha}",
        )
        self.assertEqual(
            repo.git_refs_url,
            "https://api.github.com/repos/greenbone/gvm-tools/git/refs{/sha}",
        )
        self.assertEqual(
            repo.trees_url,
            "https://api.github.com/repos/greenbone/gvm-tools/git/trees{/sha}",
        )
        self.assertEqual(
            repo.statuses_url,
            "https://api.github.com/repos/greenbone/gvm-tools/statuses/{sha}",
        )
        self.assertEqual(
            repo.languages_url,
            "https://api.github.com/repos/greenbone/gvm-tools/languages",
        )
        self.assertEqual(
            repo.stargazers_url,
            "https://api.github.com/repos/greenbone/gvm-tools/stargazers",
        )
        self.assertEqual(
            repo.contributors_url,
            "https://api.github.com/repos/greenbone/gvm-tools/contributors",
        )
        self.assertEqual(
            repo.subscribers_url,
            "https://api.github.com/repos/greenbone/gvm-tools/subscribers",
        )
        self.assertEqual(
            repo.subscription_url,
            "https://api.github.com/repos/greenbone/gvm-tools/subscription",
        )
        self.assertEqual(
            repo.commits_url,
            "https://api.github.com/repos/greenbone/gvm-tools/commits{/sha}",
        )
        self.assertEqual(
            repo.git_commits_url,
            "https://api.github.com/repos/greenbone/gvm-tools/git/commits{/sha}",
        )
        self.assertEqual(
            repo.comments_url,
            "https://api.github.com/repos/greenbone/gvm-tools/comments{/number}",
        )
        self.assertEqual(
            repo.issue_comment_url,
            "https://api.github.com/repos/greenbone/gvm-tools/issues/comments{/number}",
        )
        self.assertEqual(
            repo.contents_url,
            "https://api.github.com/repos/greenbone/gvm-tools/contents/{+path}",
        )
        self.assertEqual(
            repo.compare_url,
            "https://api.github.com/repos/greenbone/gvm-tools/compare/{base}...{head}",
        )
        self.assertEqual(
            repo.merges_url,
            "https://api.github.com/repos/greenbone/gvm-tools/merges",
        )
        self.assertEqual(
            repo.archive_url,
            "https://api.github.com/repos/greenbone/gvm-tools/{archive_format}{/ref}",
        )
        self.assertEqual(
            repo.downloads_url,
            "https://api.github.com/repos/greenbone/gvm-tools/downloads",
        )
        self.assertEqual(
            repo.issues_url,
            "https://api.github.com/repos/greenbone/gvm-tools/issues{/number}",
        )
        self.assertEqual(
            repo.pulls_url,
            "https://api.github.com/repos/greenbone/gvm-tools/pulls{/number}",
        )
        self.assertEqual(
            repo.milestones_url,
            "https://api.github.com/repos/greenbone/gvm-tools/milestones{/number}",
        )
        self.assertEqual(
            repo.notifications_url,
            "https://api.github.com/repos/greenbone/gvm-tools/notifications{?since,all,participating}",
        )
        self.assertEqual(
            repo.labels_url,
            "https://api.github.com/repos/greenbone/gvm-tools/labels{/name}",
        )
        self.assertEqual(
            repo.releases_url,
            "https://api.github.com/repos/greenbone/gvm-tools/releases{/id}",
        )
        self.assertEqual(
            repo.deployments_url,
            "https://api.github.com/repos/greenbone/gvm-tools/deployments",
        )
        self.assertEqual(
            repo.created_at,
            datetime(2017, 9, 15, 10, 54, 42, tzinfo=timezone.utc),
        )
        self.assertEqual(
            repo.updated_at,
            datetime(2022, 11, 1, 7, 45, 33, tzinfo=timezone.utc),
        )
        self.assertEqual(
            repo.pushed_at,
            datetime(2022, 11, 7, 9, 21, 30, tzinfo=timezone.utc),
        )
        self.assertEqual(
            repo.git_url, "git://github.com/greenbone/gvm-tools.git"
        )
        self.assertEqual(repo.ssh_url, "git@github.com:greenbone/gvm-tools.git")
        self.assertEqual(
            repo.clone_url, "https://github.com/greenbone/gvm-tools.git"
        )
        self.assertEqual(repo.svn_url, "https://github.com/greenbone/gvm-tools")
        self.assertEqual(
            repo.homepage, "https://greenbone.github.io/gvm-tools/"
        )
        self.assertEqual(repo.size, 3461)
        self.assertEqual(repo.stargazers_count, 128)
        self.assertEqual(repo.watchers_count, 128)
        self.assertEqual(repo.language, "Python")
        self.assertEqual(repo.has_issues, True)
        self.assertEqual(repo.has_projects, False)
        self.assertEqual(repo.has_downloads, True)
        self.assertEqual(repo.has_wiki, True)
        self.assertEqual(repo.has_pages, True)
        self.assertEqual(repo.has_discussions, False)
        self.assertEqual(repo.forks_count, 74)
        self.assertEqual(repo.mirror_url, None)
        self.assertEqual(repo.archived, False)
        self.assertEqual(repo.disabled, False)
        self.assertEqual(repo.open_issues_count, 3)
        self.assertEqual(repo.license.key, "gpl-3.0")
        self.assertEqual(repo.license.name, "GNU General Public License v3.0")
        self.assertEqual(repo.license.spdx_id, "GPL-3.0")
        self.assertEqual(
            repo.license.url, "https://api.github.com/licenses/gpl-3.0"
        )
        self.assertEqual(repo.license.node_id, "MDc6TGljZW5zZTk=")
        self.assertTrue(repo.allow_forking)
        self.assertFalse(repo.is_template)
        self.assertFalse(repo.web_commit_signoff_required)
        self.assertEqual(
            repo.topics,
            [
                "gmp",
                "gmp-scripts",
                "greenbone",
                "greenbone-vulnerability-manager",
                "gvm",
                "gvm-cli",
                "gvm-pyshell",
                "gvm-script",
                "omp",
                "openvas",
                "openvas-cli",
                "osp",
                "python",
                "vulnerability",
                "vulnerability-assessment",
                "vulnerability-management",
            ],
        )
        self.assertEqual(repo.visibility, "public")
        self.assertEqual(repo.forks, 74)
        self.assertEqual(repo.open_issues, 3)
        self.assertEqual(repo.watchers, 128)
        self.assertEqual(repo.default_branch, "main")
pontos-25.3.2/tests/github/models/test_pull_request.py000066400000000000000000002346231476255566300232260ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long, too-many-lines

import unittest
from datetime import datetime, timezone

from pontos.github.models.base import Permission, TeamPrivacy
from pontos.github.models.pull_request import (
    AuthorAssociation,
    Comment,
    MilestoneState,
    PullRequest,
    PullRequestState,
)


class PullRequestTestCase(unittest.TestCase):
    def test_from_dict(self):
        pr = PullRequest.from_dict(
            {
                "url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347",
                "id": 1,
                "node_id": "MDExOlB1bGxSZXF1ZXN0MQ==",
                "html_url": "https://github.com/octocat/Hello-World/pull/1347",
                "diff_url": "https://github.com/octocat/Hello-World/pull/1347.diff",
                "patch_url": "https://github.com/octocat/Hello-World/pull/1347.patch",
                "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347",
                "commits_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits",
                "review_comments_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments",
                "review_comment_url": "https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}",
                "comments_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments",
                "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e",
                "number": 1347,
                "state": "open",
                "locked": True,
                "title": "Amazing new feature",
                "user": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "body": "Please pull these awesome changes in!",
                "labels": [
                    {
                        "id": 208045946,
                        "node_id": "MDU6TGFiZWwyMDgwNDU5NDY=",
                        "url": "https://api.github.com/repos/octocat/Hello-World/labels/bug",
                        "name": "bug",
                        "description": "Something isn't working",
                        "color": "f29513",
                        "default": True,
                    }
                ],
                "milestone": {
                    "url": "https://api.github.com/repos/octocat/Hello-World/milestones/1",
                    "html_url": "https://github.com/octocat/Hello-World/milestones/v1.0",
                    "labels_url": "https://api.github.com/repos/octocat/Hello-World/milestones/1/labels",
                    "id": 1002604,
                    "node_id": "MDk6TWlsZXN0b25lMTAwMjYwNA==",
                    "number": 1,
                    "state": "open",
                    "title": "v1.0",
                    "description": "Tracking milestone for version 1.0",
                    "creator": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "open_issues": 4,
                    "closed_issues": 8,
                    "created_at": "2011-04-10T20:09:31Z",
                    "updated_at": "2014-03-03T18:58:10Z",
                    "closed_at": "2013-02-12T13:22:01Z",
                    "due_on": "2012-10-09T23:39:01Z",
                },
                "active_lock_reason": "too heated",
                "created_at": "2011-01-26T19:01:12Z",
                "updated_at": "2011-01-26T19:01:12Z",
                "closed_at": "2011-01-26T19:01:12Z",
                "merged_at": "2011-01-26T19:01:12Z",
                "merge_commit_sha": "e5bd3914e2e596debea16f433f57875b5b90bcd6",
                "assignee": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "assignees": [
                    {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    {
                        "login": "hubot",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/hubot_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/hubot",
                        "html_url": "https://github.com/hubot",
                        "followers_url": "https://api.github.com/users/hubot/followers",
                        "following_url": "https://api.github.com/users/hubot/following{/other_user}",
                        "gists_url": "https://api.github.com/users/hubot/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/hubot/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/hubot/subscriptions",
                        "organizations_url": "https://api.github.com/users/hubot/orgs",
                        "repos_url": "https://api.github.com/users/hubot/repos",
                        "events_url": "https://api.github.com/users/hubot/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/hubot/received_events",
                        "type": "User",
                        "site_admin": True,
                    },
                ],
                "requested_reviewers": [
                    {
                        "login": "other_user",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/other_user_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/other_user",
                        "html_url": "https://github.com/other_user",
                        "followers_url": "https://api.github.com/users/other_user/followers",
                        "following_url": "https://api.github.com/users/other_user/following{/other_user}",
                        "gists_url": "https://api.github.com/users/other_user/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/other_user/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/other_user/subscriptions",
                        "organizations_url": "https://api.github.com/users/other_user/orgs",
                        "repos_url": "https://api.github.com/users/other_user/repos",
                        "events_url": "https://api.github.com/users/other_user/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/other_user/received_events",
                        "type": "User",
                        "site_admin": False,
                    }
                ],
                "requested_teams": [
                    {
                        "id": 1,
                        "node_id": "MDQ6VGVhbTE=",
                        "url": "https://api.github.com/teams/1",
                        "html_url": "https://github.com/orgs/github/teams/justice-league",
                        "name": "Justice League",
                        "slug": "justice-league",
                        "description": "A great team.",
                        "privacy": "closed",
                        "permission": "admin",
                        "members_url": "https://api.github.com/teams/1/members{/member}",
                        "repositories_url": "https://api.github.com/teams/1/repos",
                    }
                ],
                "head": {
                    "label": "octocat:new-topic",
                    "ref": "new-topic",
                    "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    "user": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "repo": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "clone_url": "https://github.com/octocat/Hello-World.git",
                        "mirror_url": "git:git.example.com/octocat/Hello-World",
                        "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
                        "svn_url": "https://svn.github.com/octocat/Hello-World",
                        "homepage": "https://github.com",
                        "language": None,
                        "forks_count": 9,
                        "stargazers_count": 80,
                        "watchers_count": 80,
                        "size": 108,
                        "default_branch": "master",
                        "open_issues_count": 0,
                        "topics": ["octocat", "atom", "electron", "api"],
                        "has_issues": True,
                        "has_projects": True,
                        "has_wiki": True,
                        "has_pages": False,
                        "has_downloads": True,
                        "has_discussions": False,
                        "archived": False,
                        "disabled": False,
                        "pushed_at": "2011-01-26T19:06:43Z",
                        "created_at": "2011-01-26T19:01:12Z",
                        "updated_at": "2011-01-26T19:14:43Z",
                        "permissions": {
                            "admin": False,
                            "push": False,
                            "pull": True,
                        },
                        "allow_rebase_merge": True,
                        "allow_squash_merge": True,
                        "allow_merge_commit": True,
                        "allow_forking": True,
                        "forks": 123,
                        "open_issues": 123,
                        "license": {
                            "key": "mit",
                            "name": "MIT License",
                            "url": "https://api.github.com/licenses/mit",
                            "spdx_id": "MIT",
                            "node_id": "MDc6TGljZW5zZW1pdA==",
                        },
                        "watchers": 123,
                    },
                },
                "base": {
                    "label": "octocat:master",
                    "ref": "master",
                    "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e",
                    "user": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "repo": {
                        "id": 1296269,
                        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                        "name": "Hello-World",
                        "full_name": "octocat/Hello-World",
                        "owner": {
                            "login": "octocat",
                            "id": 1,
                            "node_id": "MDQ6VXNlcjE=",
                            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                            "gravatar_id": "",
                            "url": "https://api.github.com/users/octocat",
                            "html_url": "https://github.com/octocat",
                            "followers_url": "https://api.github.com/users/octocat/followers",
                            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                            "organizations_url": "https://api.github.com/users/octocat/orgs",
                            "repos_url": "https://api.github.com/users/octocat/repos",
                            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                            "received_events_url": "https://api.github.com/users/octocat/received_events",
                            "type": "User",
                            "site_admin": False,
                        },
                        "private": False,
                        "html_url": "https://github.com/octocat/Hello-World",
                        "description": "This your first repo!",
                        "fork": False,
                        "url": "https://api.github.com/repos/octocat/Hello-World",
                        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                        "git_url": "git:github.com/octocat/Hello-World.git",
                        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                        "ssh_url": "git@github.com:octocat/Hello-World.git",
                        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                        "clone_url": "https://github.com/octocat/Hello-World.git",
                        "mirror_url": "git:git.example.com/octocat/Hello-World",
                        "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
                        "svn_url": "https://svn.github.com/octocat/Hello-World",
                        "homepage": "https://github.com",
                        "language": None,
                        "forks_count": 9,
                        "stargazers_count": 80,
                        "watchers_count": 80,
                        "size": 108,
                        "default_branch": "master",
                        "open_issues_count": 0,
                        "topics": ["octocat", "atom", "electron", "api"],
                        "has_issues": True,
                        "has_projects": True,
                        "has_wiki": True,
                        "has_pages": False,
                        "has_downloads": True,
                        "has_discussions": False,
                        "archived": False,
                        "disabled": False,
                        "pushed_at": "2011-01-26T19:06:43Z",
                        "created_at": "2011-01-26T19:01:12Z",
                        "updated_at": "2011-01-26T19:14:43Z",
                        "permissions": {
                            "admin": False,
                            "push": False,
                            "pull": True,
                        },
                        "allow_rebase_merge": True,
                        "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O",
                        "allow_squash_merge": True,
                        "allow_merge_commit": True,
                        "allow_forking": True,
                        "forks": 123,
                        "open_issues": 123,
                        "license": {
                            "key": "mit",
                            "name": "MIT License",
                            "url": "https://api.github.com/licenses/mit",
                            "spdx_id": "MIT",
                            "node_id": "MDc6TGljZW5zZW1pdA==",
                        },
                        "watchers": 123,
                    },
                },
                "_links": {
                    "self": {
                        "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347"
                    },
                    "html": {
                        "href": "https://github.com/octocat/Hello-World/pull/1347"
                    },
                    "issue": {
                        "href": "https://api.github.com/repos/octocat/Hello-World/issues/1347"
                    },
                    "comments": {
                        "href": "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments"
                    },
                    "review_comments": {
                        "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments"
                    },
                    "review_comment": {
                        "href": "https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}"
                    },
                    "commits": {
                        "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits"
                    },
                    "statuses": {
                        "href": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e"
                    },
                },
                "author_association": "OWNER",
                "auto_merge": None,
                "draft": False,
                "merged": False,
                "mergeable": True,
                "rebaseable": True,
                "mergeable_state": "clean",
                "merged_by": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "comments": 10,
                "review_comments": 0,
                "maintainer_can_modify": True,
                "commits": 3,
                "additions": 100,
                "deletions": 3,
                "changed_files": 5,
            }
        )

        self.assertEqual(
            pr.url,
            "https://api.github.com/repos/octocat/Hello-World/pulls/1347",
        )
        self.assertEqual(pr.id, 1)
        self.assertEqual(pr.node_id, "MDExOlB1bGxSZXF1ZXN0MQ==")
        self.assertEqual(
            pr.html_url, "https://github.com/octocat/Hello-World/pull/1347"
        )
        self.assertEqual(
            pr.diff_url, "https://github.com/octocat/Hello-World/pull/1347.diff"
        )
        self.assertEqual(
            pr.patch_url,
            "https://github.com/octocat/Hello-World/pull/1347.patch",
        )
        self.assertEqual(
            pr.issue_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/1347",
        )
        self.assertEqual(
            pr.commits_url,
            "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits",
        )
        self.assertEqual(
            pr.review_comments_url,
            "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments",
        )
        self.assertEqual(
            pr.review_comment_url,
            "https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}",
        )
        self.assertEqual(
            pr.comments_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments",
        )
        self.assertEqual(
            pr.statuses_url,
            "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e",
        )
        self.assertEqual(pr.number, 1347)
        self.assertEqual(pr.state, PullRequestState.OPEN)
        self.assertTrue(pr.locked)
        self.assertEqual(pr.title, "Amazing new feature")
        self.assertEqual(pr.body, "Please pull these awesome changes in!")
        self.assertEqual(pr.active_lock_reason, "too heated")
        self.assertEqual(
            pr.created_at, datetime(2011, 1, 26, 19, 1, 12, tzinfo=timezone.utc)
        )
        self.assertEqual(
            pr.updated_at, datetime(2011, 1, 26, 19, 1, 12, tzinfo=timezone.utc)
        )
        self.assertEqual(
            pr.closed_at, datetime(2011, 1, 26, 19, 1, 12, tzinfo=timezone.utc)
        )
        self.assertEqual(
            pr.merged_at, datetime(2011, 1, 26, 19, 1, 12, tzinfo=timezone.utc)
        )
        self.assertEqual(
            pr.merge_commit_sha, "e5bd3914e2e596debea16f433f57875b5b90bcd6"
        )
        self.assertEqual(pr.author_association, AuthorAssociation.OWNER)
        self.assertEqual(pr.auto_merge, None)
        self.assertEqual(pr.draft, False)
        self.assertEqual(pr.merged, False)
        self.assertEqual(pr.mergeable, True)
        self.assertEqual(pr.rebaseable, True)
        self.assertEqual(pr.mergeable_state, "clean")
        self.assertEqual(pr.comments, 10)
        self.assertEqual(pr.review_comments, 0)
        self.assertTrue(pr.maintainer_can_modify)
        self.assertEqual(pr.commits, 3)
        self.assertEqual(pr.additions, 100)
        self.assertEqual(pr.deletions, 3)
        self.assertEqual(pr.changed_files, 5)

        user = pr.user
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)

        self.assertEqual(len(pr.labels), 1)
        label = pr.labels[0]
        self.assertEqual(label.id, 208045946)
        self.assertEqual(label.node_id, "MDU6TGFiZWwyMDgwNDU5NDY=")
        self.assertEqual(
            label.url,
            "https://api.github.com/repos/octocat/Hello-World/labels/bug",
        )
        self.assertEqual(label.name, "bug")
        self.assertEqual(label.description, "Something isn't working")
        self.assertEqual(label.color, "f29513")
        self.assertTrue(label.default)

        milestone = pr.milestone
        self.assertEqual(
            milestone.url,
            "https://api.github.com/repos/octocat/Hello-World/milestones/1",
        )
        self.assertEqual(
            milestone.html_url,
            "https://github.com/octocat/Hello-World/milestones/v1.0",
        )
        self.assertEqual(
            milestone.labels_url,
            "https://api.github.com/repos/octocat/Hello-World/milestones/1/labels",
        )
        self.assertEqual(milestone.id, 1002604)
        self.assertEqual(milestone.node_id, "MDk6TWlsZXN0b25lMTAwMjYwNA==")
        self.assertEqual(milestone.number, 1)
        self.assertEqual(milestone.state, MilestoneState.OPEN)
        self.assertEqual(milestone.title, "v1.0")
        self.assertEqual(
            milestone.description, "Tracking milestone for version 1.0"
        )
        self.assertEqual(milestone.open_issues, 4)
        self.assertEqual(milestone.closed_issues, 8)
        self.assertEqual(
            milestone.created_at,
            datetime(2011, 4, 10, 20, 9, 31, tzinfo=timezone.utc),
        )
        self.assertEqual(
            milestone.updated_at,
            datetime(2014, 3, 3, 18, 58, 10, tzinfo=timezone.utc),
        )
        self.assertEqual(
            milestone.closed_at,
            datetime(2013, 2, 12, 13, 22, 1, tzinfo=timezone.utc),
        )
        self.assertEqual(
            milestone.due_on,
            datetime(2012, 10, 9, 23, 39, 1, tzinfo=timezone.utc),
        )

        creator = milestone.creator
        self.assertEqual(creator.login, "octocat")
        self.assertEqual(creator.id, 1)
        self.assertEqual(creator.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            creator.avatar_url,
            "https://github.com/images/error/octocat_happy.gif",
        )
        self.assertEqual(creator.gravatar_id, "")
        self.assertEqual(creator.url, "https://api.github.com/users/octocat")
        self.assertEqual(creator.html_url, "https://github.com/octocat")
        self.assertEqual(
            creator.followers_url,
            "https://api.github.com/users/octocat/followers",
        )
        self.assertEqual(
            creator.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            creator.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            creator.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            creator.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            creator.organizations_url,
            "https://api.github.com/users/octocat/orgs",
        )
        self.assertEqual(
            creator.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            creator.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            creator.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(creator.type, "User")
        self.assertEqual(creator.site_admin, False)

        user = pr.assignee
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)

        self.assertEqual(len(pr.assignees), 2)
        user = pr.assignees[0]
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)

        user = pr.assignees[1]
        self.assertEqual(user.login, "hubot")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/hubot_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/hubot")
        self.assertEqual(user.html_url, "https://github.com/hubot")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/hubot/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/hubot/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url, "https://api.github.com/users/hubot/gists{/gist_id}"
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/hubot/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/hubot/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/hubot/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/hubot/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/hubot/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/hubot/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertTrue(user.site_admin)

        self.assertEqual(len(pr.requested_reviewers), 1)
        reviewer = pr.requested_reviewers[0]
        self.assertEqual(reviewer.login, "other_user")
        self.assertEqual(reviewer.id, 1)
        self.assertEqual(reviewer.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            reviewer.avatar_url,
            "https://github.com/images/error/other_user_happy.gif",
        )
        self.assertEqual(reviewer.gravatar_id, "")
        self.assertEqual(
            reviewer.url, "https://api.github.com/users/other_user"
        )
        self.assertEqual(reviewer.html_url, "https://github.com/other_user")
        self.assertEqual(
            reviewer.followers_url,
            "https://api.github.com/users/other_user/followers",
        )
        self.assertEqual(
            reviewer.following_url,
            "https://api.github.com/users/other_user/following{/other_user}",
        )
        self.assertEqual(
            reviewer.gists_url,
            "https://api.github.com/users/other_user/gists{/gist_id}",
        )
        self.assertEqual(
            reviewer.starred_url,
            "https://api.github.com/users/other_user/starred{/owner}{/repo}",
        )
        self.assertEqual(
            reviewer.subscriptions_url,
            "https://api.github.com/users/other_user/subscriptions",
        )
        self.assertEqual(
            reviewer.organizations_url,
            "https://api.github.com/users/other_user/orgs",
        )
        self.assertEqual(
            reviewer.repos_url, "https://api.github.com/users/other_user/repos"
        )
        self.assertEqual(
            reviewer.events_url,
            "https://api.github.com/users/other_user/events{/privacy}",
        )
        self.assertEqual(
            reviewer.received_events_url,
            "https://api.github.com/users/other_user/received_events",
        )
        self.assertEqual(reviewer.type, "User")
        self.assertEqual(reviewer.site_admin, False)

        self.assertEqual(len(pr.requested_teams), 1)
        team = pr.requested_teams[0]
        self.assertEqual(team.id, 1)
        self.assertEqual(team.node_id, "MDQ6VGVhbTE=")
        self.assertEqual(team.url, "https://api.github.com/teams/1")
        self.assertEqual(
            team.html_url, "https://github.com/orgs/github/teams/justice-league"
        )
        self.assertEqual(team.name, "Justice League")
        self.assertEqual(team.slug, "justice-league")
        self.assertEqual(team.description, "A great team.")
        self.assertEqual(team.privacy, TeamPrivacy.CLOSED)
        self.assertEqual(team.permission, Permission.ADMIN)
        self.assertEqual(
            team.members_url, "https://api.github.com/teams/1/members{/member}"
        )
        self.assertEqual(
            team.repositories_url, "https://api.github.com/teams/1/repos"
        )

        head = pr.head
        self.assertEqual(head.label, "octocat:new-topic")
        self.assertEqual(head.ref, "new-topic")
        self.assertEqual(head.sha, "6dcb09b5b57875f334f61aebed695e2e4193db5e")

        user = head.user
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)

        repo = head.repo
        self.assertEqual(repo.id, 1296269)
        self.assertEqual(repo.node_id, "MDEwOlJlcG9zaXRvcnkxMjk2MjY5")
        self.assertEqual(repo.name, "Hello-World")
        self.assertEqual(repo.full_name, "octocat/Hello-World")
        self.assertFalse(repo.private)
        self.assertEqual(
            repo.html_url, "https://github.com/octocat/Hello-World"
        )
        self.assertEqual(repo.description, "This your first repo!")
        self.assertEqual(repo.fork, False)
        self.assertEqual(
            repo.url, "https://api.github.com/repos/octocat/Hello-World"
        )
        self.assertEqual(
            repo.archive_url,
            "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
        )
        self.assertEqual(
            repo.assignees_url,
            "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
        )
        self.assertEqual(
            repo.blobs_url,
            "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
        )
        self.assertEqual(
            repo.branches_url,
            "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
        )
        self.assertEqual(
            repo.collaborators_url,
            "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
        )
        self.assertEqual(
            repo.comments_url,
            "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
        )
        self.assertEqual(
            repo.commits_url,
            "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
        )
        self.assertEqual(
            repo.compare_url,
            "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
        )
        self.assertEqual(
            repo.contents_url,
            "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
        )
        self.assertEqual(
            repo.contributors_url,
            "https://api.github.com/repos/octocat/Hello-World/contributors",
        )
        self.assertEqual(
            repo.deployments_url,
            "https://api.github.com/repos/octocat/Hello-World/deployments",
        )
        self.assertEqual(
            repo.downloads_url,
            "https://api.github.com/repos/octocat/Hello-World/downloads",
        )
        self.assertEqual(
            repo.events_url,
            "https://api.github.com/repos/octocat/Hello-World/events",
        )
        self.assertEqual(
            repo.forks_url,
            "https://api.github.com/repos/octocat/Hello-World/forks",
        )
        self.assertEqual(
            repo.git_commits_url,
            "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
        )
        self.assertEqual(
            repo.git_refs_url,
            "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
        )
        self.assertEqual(
            repo.git_tags_url,
            "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
        )
        self.assertEqual(repo.git_url, "git:github.com/octocat/Hello-World.git")
        self.assertEqual(
            repo.issue_comment_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
        )
        self.assertEqual(
            repo.issue_events_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
        )
        self.assertEqual(
            repo.issues_url,
            "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
        )
        self.assertEqual(
            repo.keys_url,
            "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
        )
        self.assertEqual(
            repo.labels_url,
            "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
        )
        self.assertEqual(
            repo.languages_url,
            "https://api.github.com/repos/octocat/Hello-World/languages",
        )
        self.assertEqual(
            repo.merges_url,
            "https://api.github.com/repos/octocat/Hello-World/merges",
        )
        self.assertEqual(
            repo.milestones_url,
            "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
        )
        self.assertEqual(
            repo.notifications_url,
            "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
        )
        self.assertEqual(
            repo.pulls_url,
            "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
        )
        self.assertEqual(
            repo.releases_url,
            "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
        )
        self.assertEqual(repo.ssh_url, "git@github.com:octocat/Hello-World.git")
        self.assertEqual(
            repo.stargazers_url,
            "https://api.github.com/repos/octocat/Hello-World/stargazers",
        )
        self.assertEqual(
            repo.statuses_url,
            "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
        )
        self.assertEqual(
            repo.subscribers_url,
            "https://api.github.com/repos/octocat/Hello-World/subscribers",
        )
        self.assertEqual(
            repo.subscription_url,
            "https://api.github.com/repos/octocat/Hello-World/subscription",
        )
        self.assertEqual(
            repo.tags_url,
            "https://api.github.com/repos/octocat/Hello-World/tags",
        )
        self.assertEqual(
            repo.teams_url,
            "https://api.github.com/repos/octocat/Hello-World/teams",
        )
        self.assertEqual(
            repo.trees_url,
            "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
        )
        self.assertEqual(
            repo.clone_url, "https://github.com/octocat/Hello-World.git"
        )
        self.assertEqual(
            repo.mirror_url, "git:git.example.com/octocat/Hello-World"
        )
        self.assertEqual(
            repo.hooks_url,
            "https://api.github.com/repos/octocat/Hello-World/hooks",
        )
        self.assertEqual(
            repo.svn_url, "https://svn.github.com/octocat/Hello-World"
        )
        self.assertEqual(repo.homepage, "https://github.com")
        self.assertIsNone(repo.language)
        self.assertEqual(repo.forks_count, 9)
        self.assertEqual(repo.stargazers_count, 80)
        self.assertEqual(repo.watchers_count, 80)
        self.assertEqual(repo.size, 108)
        self.assertEqual(repo.default_branch, "master")
        self.assertEqual(repo.open_issues_count, 0)
        self.assertEqual(repo.topics, ["octocat", "atom", "electron", "api"])
        self.assertTrue(repo.has_issues)
        self.assertTrue(repo.has_projects)
        self.assertTrue(repo.has_wiki)
        self.assertFalse(repo.has_pages)
        self.assertTrue(repo.has_downloads)
        self.assertFalse(repo.has_discussions)
        self.assertFalse(repo.archived)
        self.assertFalse(repo.disabled)
        self.assertEqual(
            repo.pushed_at,
            datetime(2011, 1, 26, 19, 6, 43, tzinfo=timezone.utc),
        )
        self.assertEqual(
            repo.created_at,
            datetime(2011, 1, 26, 19, 1, 12, tzinfo=timezone.utc),
        )
        self.assertEqual(
            repo.updated_at,
            datetime(2011, 1, 26, 19, 14, 43, tzinfo=timezone.utc),
        )
        self.assertTrue(repo.allow_rebase_merge)
        self.assertTrue(repo.allow_squash_merge)
        self.assertTrue(repo.allow_merge_commit)
        self.assertTrue(repo.allow_forking)
        self.assertEqual(repo.forks, 123)
        self.assertEqual(repo.open_issues, 123)
        self.assertEqual(repo.watchers, 123)

        user = repo.owner
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)

        license = repo.license  # pylint: disable=redefined-builtin
        self.assertEqual(license.key, "mit")
        self.assertEqual(license.name, "MIT License")
        self.assertEqual(license.url, "https://api.github.com/licenses/mit")
        self.assertEqual(license.spdx_id, "MIT")
        self.assertEqual(license.node_id, "MDc6TGljZW5zZW1pdA==")

        self.assertFalse(repo.permissions.admin)
        self.assertFalse(repo.permissions.push)
        self.assertTrue(repo.permissions.pull)

        base = pr.base
        self.assertEqual(base.label, "octocat:master")
        self.assertEqual(base.ref, "master")
        self.assertEqual(base.sha, "6dcb09b5b57875f334f61aebed695e2e4193db5e")

        user = base.user
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)

        repo = base.repo
        self.assertEqual(repo.id, 1296269)
        self.assertEqual(repo.node_id, "MDEwOlJlcG9zaXRvcnkxMjk2MjY5")
        self.assertEqual(repo.name, "Hello-World")
        self.assertEqual(repo.full_name, "octocat/Hello-World")
        self.assertFalse(repo.private)
        self.assertEqual(
            repo.html_url, "https://github.com/octocat/Hello-World"
        )
        self.assertEqual(repo.description, "This your first repo!")
        self.assertEqual(repo.fork, False)
        self.assertEqual(
            repo.url, "https://api.github.com/repos/octocat/Hello-World"
        )
        self.assertEqual(
            repo.archive_url,
            "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
        )
        self.assertEqual(
            repo.assignees_url,
            "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
        )
        self.assertEqual(
            repo.blobs_url,
            "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
        )
        self.assertEqual(
            repo.branches_url,
            "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
        )
        self.assertEqual(
            repo.collaborators_url,
            "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
        )
        self.assertEqual(
            repo.comments_url,
            "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
        )
        self.assertEqual(
            repo.commits_url,
            "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
        )
        self.assertEqual(
            repo.compare_url,
            "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
        )
        self.assertEqual(
            repo.contents_url,
            "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
        )
        self.assertEqual(
            repo.contributors_url,
            "https://api.github.com/repos/octocat/Hello-World/contributors",
        )
        self.assertEqual(
            repo.deployments_url,
            "https://api.github.com/repos/octocat/Hello-World/deployments",
        )
        self.assertEqual(
            repo.downloads_url,
            "https://api.github.com/repos/octocat/Hello-World/downloads",
        )
        self.assertEqual(
            repo.events_url,
            "https://api.github.com/repos/octocat/Hello-World/events",
        )
        self.assertEqual(
            repo.forks_url,
            "https://api.github.com/repos/octocat/Hello-World/forks",
        )
        self.assertEqual(
            repo.git_commits_url,
            "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
        )
        self.assertEqual(
            repo.git_refs_url,
            "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
        )
        self.assertEqual(
            repo.git_tags_url,
            "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
        )
        self.assertEqual(repo.git_url, "git:github.com/octocat/Hello-World.git")
        self.assertEqual(
            repo.issue_comment_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
        )
        self.assertEqual(
            repo.issue_events_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
        )
        self.assertEqual(
            repo.issues_url,
            "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
        )
        self.assertEqual(
            repo.keys_url,
            "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
        )
        self.assertEqual(
            repo.labels_url,
            "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
        )
        self.assertEqual(
            repo.languages_url,
            "https://api.github.com/repos/octocat/Hello-World/languages",
        )
        self.assertEqual(
            repo.merges_url,
            "https://api.github.com/repos/octocat/Hello-World/merges",
        )
        self.assertEqual(
            repo.milestones_url,
            "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
        )
        self.assertEqual(
            repo.notifications_url,
            "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
        )
        self.assertEqual(
            repo.pulls_url,
            "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
        )
        self.assertEqual(
            repo.releases_url,
            "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
        )
        self.assertEqual(repo.ssh_url, "git@github.com:octocat/Hello-World.git")
        self.assertEqual(
            repo.stargazers_url,
            "https://api.github.com/repos/octocat/Hello-World/stargazers",
        )
        self.assertEqual(
            repo.statuses_url,
            "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
        )
        self.assertEqual(
            repo.subscribers_url,
            "https://api.github.com/repos/octocat/Hello-World/subscribers",
        )
        self.assertEqual(
            repo.subscription_url,
            "https://api.github.com/repos/octocat/Hello-World/subscription",
        )
        self.assertEqual(
            repo.tags_url,
            "https://api.github.com/repos/octocat/Hello-World/tags",
        )
        self.assertEqual(
            repo.teams_url,
            "https://api.github.com/repos/octocat/Hello-World/teams",
        )
        self.assertEqual(
            repo.trees_url,
            "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
        )
        self.assertEqual(
            repo.clone_url, "https://github.com/octocat/Hello-World.git"
        )
        self.assertEqual(
            repo.mirror_url, "git:git.example.com/octocat/Hello-World"
        )
        self.assertEqual(
            repo.hooks_url,
            "https://api.github.com/repos/octocat/Hello-World/hooks",
        )
        self.assertEqual(
            repo.svn_url, "https://svn.github.com/octocat/Hello-World"
        )
        self.assertEqual(repo.homepage, "https://github.com")
        self.assertIsNone(repo.language)
        self.assertEqual(repo.forks_count, 9)
        self.assertEqual(repo.stargazers_count, 80)
        self.assertEqual(repo.watchers_count, 80)
        self.assertEqual(repo.size, 108)
        self.assertEqual(repo.default_branch, "master")
        self.assertEqual(repo.open_issues_count, 0)
        self.assertEqual(repo.topics, ["octocat", "atom", "electron", "api"])
        self.assertTrue(repo.has_issues)
        self.assertTrue(repo.has_projects)
        self.assertTrue(repo.has_wiki)
        self.assertFalse(repo.has_pages)
        self.assertTrue(repo.has_downloads)
        self.assertFalse(repo.has_discussions)
        self.assertFalse(repo.archived)
        self.assertFalse(repo.disabled)
        self.assertEqual(
            repo.pushed_at,
            datetime(2011, 1, 26, 19, 6, 43, tzinfo=timezone.utc),
        )
        self.assertEqual(
            repo.created_at,
            datetime(2011, 1, 26, 19, 1, 12, tzinfo=timezone.utc),
        )
        self.assertEqual(
            repo.updated_at,
            datetime(2011, 1, 26, 19, 14, 43, tzinfo=timezone.utc),
        )
        self.assertTrue(repo.allow_rebase_merge)
        self.assertTrue(repo.allow_squash_merge)
        self.assertTrue(repo.allow_merge_commit)
        self.assertTrue(repo.allow_forking)
        self.assertEqual(repo.forks, 123)
        self.assertEqual(repo.open_issues, 123)
        self.assertEqual(repo.watchers, 123)

        user = pr.merged_by
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)


class CommentTestCase(unittest.TestCase):
    def test_from_dict(self):
        """Comment.from_dict parses the scalar fields, the nested user
        model and the author association from a GitHub REST payload."""
        user_data = {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        }
        data = {
            "id": 1,
            "node_id": "MDEyOklzc3VlQ29tbWVudDE=",
            "url": "https://api.github.com/repos/octocat/Hello-World/issues/comments/1",
            "html_url": "https://github.com/octocat/Hello-World/issues/1347#issuecomment-1",
            "body": "Me too",
            "user": user_data,
            "created_at": "2011-04-14T16:00:49Z",
            "updated_at": "2011-04-14T16:00:49Z",
            "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347",
            "author_association": "COLLABORATOR",
        }

        comment = Comment.from_dict(data)

        self.assertEqual(comment.id, 1)
        self.assertEqual(comment.node_id, "MDEyOklzc3VlQ29tbWVudDE=")
        self.assertEqual(
            comment.url,
            "https://api.github.com/repos/octocat/Hello-World/issues/comments/1",
        )
        self.assertEqual(
            comment.html_url,
            "https://github.com/octocat/Hello-World/issues/1347#issuecomment-1",
        )
        self.assertEqual(comment.body, "Me too")
        self.assertEqual(
            comment.issue_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/1347",
        )
        self.assertEqual(
            comment.author_association, AuthorAssociation.COLLABORATOR
        )

        # The nested user model must mirror the input payload field by field.
        user = comment.user
        self.assertEqual(user.type, "User")
        self.assertFalse(user.site_admin)
        for field, expected in user_data.items():
            if field in ("type", "site_admin"):
                continue
            self.assertEqual(getattr(user, field), expected)
pontos-25.3.2/tests/github/models/test_release.py000066400000000000000000000262631476255566300221210ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long

import unittest
from datetime import datetime, timezone

from pontos.github.models.release import Release, ReleaseAssetState


class ReleaseTestCase(unittest.TestCase):
    # Canonical "octocat" user payload; it appears twice in the fixture
    # below, once as the release author and once as the asset uploader.
    _OCTOCAT = {
        "login": "octocat",
        "id": 1,
        "node_id": "MDQ6VXNlcjE=",
        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
        "gravatar_id": "",
        "url": "https://api.github.com/users/octocat",
        "html_url": "https://github.com/octocat",
        "followers_url": "https://api.github.com/users/octocat/followers",
        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
        "organizations_url": "https://api.github.com/users/octocat/orgs",
        "repos_url": "https://api.github.com/users/octocat/repos",
        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
        "received_events_url": "https://api.github.com/users/octocat/received_events",
        "type": "User",
        "site_admin": False,
    }

    def _assert_is_octocat(self, user):
        """Assert that every attribute of *user* matches the _OCTOCAT payload."""
        for field, value in self._OCTOCAT.items():
            self.assertEqual(getattr(user, field), value)

    def test_from_dict(self):
        """Release.from_dict parses the release metadata, its author and
        the attached assets including the uploading user."""
        release = Release.from_dict(
            {
                "url": "https://api.github.com/repos/octocat/Hello-World/releases/1",
                "html_url": "https://github.com/octocat/Hello-World/releases/v1.0.0",
                "assets_url": "https://api.github.com/repos/octocat/Hello-World/releases/1/assets",
                "upload_url": "https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}",
                "tarball_url": "https://api.github.com/repos/octocat/Hello-World/tarball/v1.0.0",
                "zipball_url": "https://api.github.com/repos/octocat/Hello-World/zipball/v1.0.0",
                "discussion_url": "https://github.com/octocat/Hello-World/discussions/90",
                "id": 1,
                "node_id": "MDc6UmVsZWFzZTE=",
                "tag_name": "v1.0.0",
                "target_commitish": "master",
                "name": "v1.0.0",
                "body": "Description of the release",
                "draft": False,
                "prerelease": False,
                "created_at": "2013-02-27T19:35:32Z",
                "published_at": "2013-02-27T19:35:32Z",
                "author": dict(self._OCTOCAT),
                "assets": [
                    {
                        "url": "https://api.github.com/repos/octocat/Hello-World/releases/assets/1",
                        "browser_download_url": "https://github.com/octocat/Hello-World/releases/download/v1.0.0/example.zip",
                        "id": 1,
                        "node_id": "MDEyOlJlbGVhc2VBc3NldDE=",
                        "name": "example.zip",
                        "label": "short description",
                        "state": "uploaded",
                        "content_type": "application/zip",
                        "size": 1024,
                        "download_count": 42,
                        "created_at": "2013-02-27T19:35:32Z",
                        "updated_at": "2013-02-27T19:35:32Z",
                        "uploader": dict(self._OCTOCAT),
                    }
                ],
            }
        )

        # Scalar release attributes must come through unchanged.
        expected_release = {
            "url": "https://api.github.com/repos/octocat/Hello-World/releases/1",
            "html_url": "https://github.com/octocat/Hello-World/releases/v1.0.0",
            "assets_url": "https://api.github.com/repos/octocat/Hello-World/releases/1/assets",
            "upload_url": "https://uploads.github.com/repos/octocat/Hello-World/releases/1/assets{?name,label}",
            "tarball_url": "https://api.github.com/repos/octocat/Hello-World/tarball/v1.0.0",
            "zipball_url": "https://api.github.com/repos/octocat/Hello-World/zipball/v1.0.0",
            "discussion_url": "https://github.com/octocat/Hello-World/discussions/90",
            "id": 1,
            "node_id": "MDc6UmVsZWFzZTE=",
            "tag_name": "v1.0.0",
            "target_commitish": "master",
            "name": "v1.0.0",
            "body": "Description of the release",
            "draft": False,
            "prerelease": False,
        }
        for field, value in expected_release.items():
            self.assertEqual(getattr(release, field), value)

        # Timestamps are parsed into timezone-aware datetimes.
        timestamp = datetime(2013, 2, 27, 19, 35, 32, tzinfo=timezone.utc)
        self.assertEqual(release.created_at, timestamp)
        self.assertEqual(release.published_at, timestamp)

        self._assert_is_octocat(release.author)

        self.assertEqual(len(release.assets), 1)
        asset = release.assets[0]
        expected_asset = {
            "url": "https://api.github.com/repos/octocat/Hello-World/releases/assets/1",
            "browser_download_url": "https://github.com/octocat/Hello-World/releases/download/v1.0.0/example.zip",
            "id": 1,
            "node_id": "MDEyOlJlbGVhc2VBc3NldDE=",
            "name": "example.zip",
            "label": "short description",
            "content_type": "application/zip",
            "size": 1024,
            "download_count": 42,
        }
        for field, value in expected_asset.items():
            self.assertEqual(getattr(asset, field), value)
        # The "uploaded" string is mapped to its enum member.
        self.assertEqual(asset.state, ReleaseAssetState.UPLOADED)
        self.assertEqual(asset.created_at, timestamp)
        self.assertEqual(asset.updated_at, timestamp)

        self._assert_is_octocat(asset.uploader)
pontos-25.3.2/tests/github/models/test_search.py000066400000000000000000000030751476255566300217420ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long, invalid-name

import unittest

from pontos.github.models.search import (
    InDescriptionQualifier,
    InNameQualifier,
    InReadmeQualifier,
    InTopicsQualifier,
    IsPrivateQualifier,
    IsPublicQualifier,
    NotQualifier,
    OrganizationQualifier,
    RepositoryQualifier,
    UserQualifier,
)


class QualifierTestCase(unittest.TestCase):
    """Ensure each search qualifier renders the expected query fragment."""

    def _check(self, qualifier, expected):
        # Every qualifier is exercised solely through its str() output,
        # which is what gets embedded into a GitHub search query string.
        self.assertEqual(str(qualifier), expected)

    def test_name(self):
        self._check(InNameQualifier(), "in:name")

    def test_description(self):
        self._check(InDescriptionQualifier(), "in:description")

    def test_topics(self):
        self._check(InTopicsQualifier(), "in:topics")

    def test_readme(self):
        self._check(InReadmeQualifier(), "in:readme")

    def test_not(self):
        # NotQualifier negates the wrapped qualifier with a "-" prefix
        self._check(NotQualifier(InNameQualifier()), "-in:name")

    def test_repository(self):
        self._check(RepositoryQualifier("foo/bar"), "repo:foo/bar")

    def test_organization(self):
        self._check(OrganizationQualifier("foo"), "org:foo")

    def test_user(self):
        self._check(UserQualifier("foo"), "user:foo")

    def test_is_public(self):
        self._check(IsPublicQualifier(), "is:public")

    def test_is_private(self):
        self._check(IsPrivateQualifier(), "is:private")
pontos-25.3.2/tests/github/models/test_secret_scanning.py000066400000000000000000000231371476255566300236430ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

import unittest
from datetime import datetime, timezone

from pontos.github.models.secret_scanning import (
    AlertState,
    Resolution,
    SecretScanningAlert,
)

# Fixture: a secret scanning alert payload as returned by the GitHub REST
# API (GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}).
# Consumed by SecretScanningAlertTestCase below.
ALERT = {
    "number": 2,
    "created_at": "2020-11-06T18:48:51Z",
    "url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/2",
    "html_url": "https://github.com/owner/private-repo/security/secret-scanning/2",
    "locations_url": "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/2/locations",
    "state": "resolved",
    "resolution": "false_positive",
    "resolved_at": "2020-11-07T02:47:13Z",
    # user object of the person that resolved the alert
    "resolved_by": {
        "login": "monalisa",
        "id": 2,
        "node_id": "MDQ6VXNlcjI=",
        "avatar_url": "https://alambic.github.com/avatars/u/2?",
        "gravatar_id": "",
        "url": "https://api.github.com/users/monalisa",
        "html_url": "https://github.com/monalisa",
        "followers_url": "https://api.github.com/users/monalisa/followers",
        "following_url": "https://api.github.com/users/monalisa/following{/other_user}",
        "gists_url": "https://api.github.com/users/monalisa/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/monalisa/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/monalisa/subscriptions",
        "organizations_url": "https://api.github.com/users/monalisa/orgs",
        "repos_url": "https://api.github.com/users/monalisa/repos",
        "events_url": "https://api.github.com/users/monalisa/events{/privacy}",
        "received_events_url": "https://api.github.com/users/monalisa/received_events",
        "type": "User",
        "site_admin": True,
    },
    "secret_type": "adafruit_io_key",
    "secret_type_display_name": "Adafruit IO Key",
    "secret": "aio_XXXXXXXXXXXXXXXXXXXXXXXXXXXX",
    # repository object the alert was detected in
    "repository": {
        "id": 1296269,
        "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
        "name": "Hello-World",
        "full_name": "octocat/Hello-World",
        "owner": {
            "login": "octocat",
            "id": 1,
            "node_id": "MDQ6VXNlcjE=",
            "avatar_url": "https://github.com/images/error/octocat_happy.gif",
            "gravatar_id": "",
            "url": "https://api.github.com/users/octocat",
            "html_url": "https://github.com/octocat",
            "followers_url": "https://api.github.com/users/octocat/followers",
            "following_url": "https://api.github.com/users/octocat/following{/other_user}",
            "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
            "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
            "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
            "organizations_url": "https://api.github.com/users/octocat/orgs",
            "repos_url": "https://api.github.com/users/octocat/repos",
            "events_url": "https://api.github.com/users/octocat/events{/privacy}",
            "received_events_url": "https://api.github.com/users/octocat/received_events",
            "type": "User",
            "site_admin": False,
        },
        "private": False,
        "html_url": "https://github.com/octocat/Hello-World",
        "description": "This your first repo!",
        "fork": False,
        "url": "https://api.github.com/repos/octocat/Hello-World",
        "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
        "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
        "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
        "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
        "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
        "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
        "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
        "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
        "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
        "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
        "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
        "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
        "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
        "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
        "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
        "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
        "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
        "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
        "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
        "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
        "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
        "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
        "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
        "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
        "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
        "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
        "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
        "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
        "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
        "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
        "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
        "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
        "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
        "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
        "hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
    },
    # user object of the person that bypassed push protection
    "push_protection_bypassed_by": {
        "login": "monalisa",
        "id": 2,
        "node_id": "MDQ6VXNlcjI=",
        "avatar_url": "https://alambic.github.com/avatars/u/2?",
        "gravatar_id": "",
        "url": "https://api.github.com/users/monalisa",
        "html_url": "https://github.com/monalisa",
        "followers_url": "https://api.github.com/users/monalisa/followers",
        "following_url": "https://api.github.com/users/monalisa/following{/other_user}",
        "gists_url": "https://api.github.com/users/monalisa/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/monalisa/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/monalisa/subscriptions",
        "organizations_url": "https://api.github.com/users/monalisa/orgs",
        "repos_url": "https://api.github.com/users/monalisa/repos",
        "events_url": "https://api.github.com/users/monalisa/events{/privacy}",
        "received_events_url": "https://api.github.com/users/monalisa/received_events",
        "type": "User",
        "site_admin": True,
    },
    "push_protection_bypassed": True,
    "push_protection_bypassed_at": "2020-11-06T21:48:51Z",
    "resolution_comment": "Example comment",
}


class SecretScanningAlertTestCase(unittest.TestCase):
    """Deserialization tests for the SecretScanningAlert model."""

    def test_from_dict(self):
        """Every field of the ALERT fixture ends up on the model."""
        parsed = SecretScanningAlert.from_dict(ALERT)

        # identification and links
        self.assertEqual(parsed.number, 2)
        self.assertEqual(
            parsed.url,
            "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/2",
        )
        self.assertEqual(
            parsed.html_url,
            "https://github.com/owner/private-repo/security/secret-scanning/2",
        )
        self.assertEqual(
            parsed.locations_url,
            "https://api.github.com/repos/owner/private-repo/secret-scanning/alerts/2/locations",
        )

        # detected secret and affected repository
        self.assertEqual(parsed.state, AlertState.RESOLVED)
        self.assertEqual(parsed.secret_type, "adafruit_io_key")
        self.assertEqual(parsed.secret_type_display_name, "Adafruit IO Key")
        self.assertEqual(parsed.secret, "aio_XXXXXXXXXXXXXXXXXXXXXXXXXXXX")
        self.assertEqual(parsed.repository.id, 1296269)

        # timestamps — dates are parsed as timezone-aware UTC datetimes
        self.assertEqual(
            parsed.created_at,
            datetime(2020, 11, 6, 18, 48, 51, tzinfo=timezone.utc),
        )
        self.assertIsNone(parsed.updated_at)

        # resolution details
        self.assertEqual(parsed.resolution, Resolution.FALSE_POSITIVE)
        self.assertEqual(parsed.resolution_comment, "Example comment")
        self.assertEqual(
            parsed.resolved_at,
            datetime(2020, 11, 7, 2, 47, 13, tzinfo=timezone.utc),
        )
        self.assertEqual(parsed.resolved_by.login, "monalisa")

        # push protection bypass information
        self.assertTrue(parsed.push_protection_bypassed)
        self.assertEqual(parsed.push_protection_bypassed_by.login, "monalisa")
        self.assertEqual(
            parsed.push_protection_bypassed_at,
            datetime(2020, 11, 6, 21, 48, 51, tzinfo=timezone.utc),
        )
pontos-25.3.2/tests/github/models/test_tag.py000066400000000000000000000053601476255566300212470ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long
# ruff: noqa: E501

import unittest
from datetime import datetime, timezone

from pontos.github.models.tag import GitObjectType, Tag, VerificationReason


class TagModelTestCase(unittest.TestCase):
    """Deserialization tests for the Tag model."""

    def test_from_dict(self):
        """A tag API payload is converted into a fully populated Tag."""
        payload = {
            "node_id": "MDM6VGFnOTQwYmQzMzYyNDhlZmFlMGY5ZWU1YmM3YjJkNWM5ODU4ODdiMTZhYw==",
            "tag": "v0.0.1",
            "sha": "940bd336248efae0f9ee5bc7b2d5c985887b16ac",
            "url": "https://api.github.com/repos/octocat/Hello-World/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac",
            "message": "initial version",
            "tagger": {
                "name": "Monalisa Octocat",
                "email": "octocat@github.com",
                "date": "2014-11-07T22:01:45Z",
            },
            "object": {
                "type": "commit",
                "sha": "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
                "url": "https://api.github.com/repos/octocat/Hello-World/git/commits/c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
            },
            "verification": {
                "verified": False,
                "reason": "unsigned",
                "signature": None,
                "payload": None,
            },
        }

        tag = Tag.from_dict(payload)

        # top-level tag attributes
        self.assertEqual(
            tag.node_id,
            "MDM6VGFnOTQwYmQzMzYyNDhlZmFlMGY5ZWU1YmM3YjJkNWM5ODU4ODdiMTZhYw==",
        )
        self.assertEqual(tag.tag, "v0.0.1")
        self.assertEqual(tag.sha, "940bd336248efae0f9ee5bc7b2d5c985887b16ac")
        self.assertEqual(
            tag.url,
            "https://api.github.com/repos/octocat/Hello-World/git/tags/940bd336248efae0f9ee5bc7b2d5c985887b16ac",
        )
        self.assertEqual(tag.message, "initial version")

        # nested tagger with a timezone-aware date
        self.assertEqual(tag.tagger.name, "Monalisa Octocat")
        self.assertEqual(tag.tagger.email, "octocat@github.com")
        self.assertEqual(
            tag.tagger.date,
            datetime(2014, 11, 7, 22, 1, 45, tzinfo=timezone.utc),
        )

        # git object the tag points at
        pointee = tag.object
        self.assertEqual(pointee.type, GitObjectType.COMMIT)
        self.assertEqual(
            pointee.sha, "c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c"
        )
        self.assertEqual(
            pointee.url,
            "https://api.github.com/repos/octocat/Hello-World/git/commits/c3d0be41ecbe669545ee3e94d31ed9a4bc91ee3c",
        )

        # signature verification block of an unsigned tag
        self.assertFalse(tag.verification.verified)
        self.assertEqual(tag.verification.reason, VerificationReason.UNSIGNED)
        self.assertIsNone(tag.verification.payload)
        self.assertIsNone(tag.verification.signature)
pontos-25.3.2/tests/github/models/test_user.py000066400000000000000000000037521476255566300214550ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# ruff: noqa:E501

import unittest
from datetime import datetime, timezone

from pontos.github.models.user import (
    EmailInformation,
    SSHPublicKey,
    SSHPublicKeyExtended,
)


class SSHPublicKeyTestCase(unittest.TestCase):
    """Deserialization tests for the SSHPublicKey model."""

    def test_from_dict(self):
        data = {"id": 1, "key": "ssh-rsa AAA..."}

        public_key = SSHPublicKey.from_dict(data)

        self.assertEqual(public_key.id, 1)
        self.assertEqual(public_key.key, "ssh-rsa AAA...")


class SSHPublicKeyExtendedTestCase(unittest.TestCase):
    """Deserialization tests for the SSHPublicKeyExtended model."""

    def test_from_dict(self):
        data = {
            "key": "2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvv1234",
            "id": 2,
            "url": "https://api.github.com/user/keys/2",
            "title": "ssh-rsa AAAAB3NzaC1yc2EAAA",
            "created_at": "2020-06-11T21:31:57Z",
            "verified": False,
            "read_only": False,
        }

        public_key = SSHPublicKeyExtended.from_dict(data)

        self.assertEqual(public_key.id, 2)
        self.assertEqual(
            public_key.key, "2Sg8iYjAxxmI2LvUXpJjkYrMxURPc8r+dB7TJyvv1234"
        )
        self.assertEqual(public_key.url, "https://api.github.com/user/keys/2")
        self.assertEqual(public_key.title, "ssh-rsa AAAAB3NzaC1yc2EAAA")
        self.assertFalse(public_key.verified)
        self.assertFalse(public_key.read_only)
        # the ISO timestamp becomes a timezone-aware UTC datetime
        self.assertEqual(
            public_key.created_at,
            datetime(2020, 6, 11, 21, 31, 57, tzinfo=timezone.utc),
        )


class EmailInformationTestCase(unittest.TestCase):
    """Deserialization tests for the EmailInformation model."""

    def test_from_dict(self):
        data = {
            "email": "octocat@github.com",
            "verified": True,
            "primary": True,
            "visibility": "public",
        }

        info = EmailInformation.from_dict(data)

        self.assertEqual(info.email, "octocat@github.com")
        self.assertEqual(info.visibility, "public")
        self.assertTrue(info.primary)
        self.assertTrue(info.verified)
pontos-25.3.2/tests/github/models/test_workflow.py000066400000000000000000001176141476255566300223540ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long

import unittest
from datetime import datetime, timedelta, timezone

from pontos.github.models.base import Event
from pontos.github.models.workflow import (
    Workflow,
    WorkflowRun,
    WorkflowRunStatus,
    WorkflowState,
)


class WorkflowTestCase(unittest.TestCase):
    """Deserialization tests for the Workflow model."""

    def test_from_dict(self):
        """A workflow API payload is converted into a Workflow model."""
        data = {
            "id": 1,
            "node_id": "MDg6V29ya2Zsb3cxNjEzMzU=",
            "name": "CI",
            "path": ".github/workflows/blank.yaml",
            "state": "active",
            "created_at": "2020-01-08T23:48:37.000-08:00",
            "updated_at": "2020-01-08T23:50:21.000-08:00",
            "url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/161335",
            "html_url": "https://github.com/octo-org/octo-repo/blob/master/.github/workflows/161335",
            "badge_url": "https://github.com/octo-org/octo-repo/workflows/CI/badge.svg",
        }

        workflow = Workflow.from_dict(data)

        self.assertEqual(workflow.id, 1)
        self.assertEqual(workflow.node_id, "MDg6V29ya2Zsb3cxNjEzMzU=")
        self.assertEqual(workflow.name, "CI")
        self.assertEqual(workflow.path, ".github/workflows/blank.yaml")
        self.assertEqual(workflow.state, WorkflowState.ACTIVE)

        # both timestamps carry an explicit -08:00 UTC offset
        pst = timezone(timedelta(hours=-8))
        self.assertEqual(
            workflow.created_at,
            datetime(2020, 1, 8, 23, 48, 37, tzinfo=pst),
        )
        self.assertEqual(
            workflow.updated_at,
            datetime(2020, 1, 8, 23, 50, 21, tzinfo=pst),
        )

        self.assertEqual(
            workflow.url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/161335",
        )
        self.assertEqual(
            workflow.html_url,
            "https://github.com/octo-org/octo-repo/blob/master/.github/workflows/161335",
        )
        self.assertEqual(
            workflow.badge_url,
            "https://github.com/octo-org/octo-repo/workflows/CI/badge.svg",
        )


class WorkflowRunTestCase(unittest.TestCase):
    def test_from_dict(self):
        run = WorkflowRun.from_dict(
            {
                "id": 1,
                "name": "Build",
                "node_id": "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==",
                "check_suite_id": 42,
                "check_suite_node_id": "MDEwOkNoZWNrU3VpdGU0Mg==",
                "head_branch": "master",
                "head_sha": "acb5820ced9479c074f688cc328bf03f341a511d",
                "run_number": 562,
                "event": "push",
                "status": "queued",
                "conclusion": None,
                "workflow_id": 159038,
                "url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
                "html_url": "https://github.com/octo-org/octo-repo/actions/runs/30433642",
                "pull_requests": [],
                "created_at": "2020-01-22T19:33:08Z",
                "updated_at": "2020-01-22T19:33:08Z",
                "actor": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "run_attempt": 1,
                "run_started_at": "2020-01-22T19:33:08Z",
                "triggering_actor": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": False,
                },
                "jobs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
                "logs_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
                "check_suite_url": "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
                "artifacts_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
                "cancel_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
                "rerun_url": "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
                "workflow_url": "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
                "head_commit": {
                    "id": "acb5820ced9479c074f688cc328bf03f341a511d",
                    "tree_id": "d23f6eedb1e1b9610bbc754ddb5197bfe7271223",
                    "message": "Create linter.yaml",
                    "timestamp": "2020-01-22T19:33:05Z",
                    "author": {
                        "name": "Octo Cat",
                        "email": "octocat@github.com",
                    },
                    "committer": {
                        "name": "GitHub",
                        "email": "noreply@github.com",
                    },
                },
                "repository": {
                    "id": 1296269,
                    "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
                    "name": "Hello-World",
                    "full_name": "octocat/Hello-World",
                    "owner": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "private": False,
                    "html_url": "https://github.com/octocat/Hello-World",
                    "description": "This your first repo!",
                    "fork": False,
                    "url": "https://api.github.com/repos/octocat/Hello-World",
                    "archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
                    "assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
                    "blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
                    "branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
                    "collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
                    "comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
                    "commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
                    "compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
                    "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
                    "contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
                    "deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
                    "downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
                    "events_url": "https://api.github.com/repos/octocat/Hello-World/events",
                    "forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
                    "git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
                    "git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
                    "git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
                    "git_url": "git:github.com/octocat/Hello-World.git",
                    "issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
                    "issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
                    "issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
                    "keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
                    "labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
                    "languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
                    "merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
                    "milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
                    "notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
                    "pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
                    "releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
                    "ssh_url": "git@github.com:octocat/Hello-World.git",
                    "stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
                    "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
                    "subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
                    "subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
                    "tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
                    "teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
                    "trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
                    "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks",
                },
                "head_repository": {
                    "id": 217723378,
                    "node_id": "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=",
                    "name": "octo-repo",
                    "full_name": "octo-org/octo-repo",
                    "private": True,
                    "owner": {
                        "login": "octocat",
                        "id": 1,
                        "node_id": "MDQ6VXNlcjE=",
                        "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/octocat",
                        "html_url": "https://github.com/octocat",
                        "followers_url": "https://api.github.com/users/octocat/followers",
                        "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/octocat/orgs",
                        "repos_url": "https://api.github.com/users/octocat/repos",
                        "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/octocat/received_events",
                        "type": "User",
                        "site_admin": False,
                    },
                    "html_url": "https://github.com/octo-org/octo-repo",
                    "description": None,
                    "fork": False,
                    "url": "https://api.github.com/repos/octo-org/octo-repo",
                    "forks_url": "https://api.github.com/repos/octo-org/octo-repo/forks",
                    "keys_url": "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
                    "collaborators_url": "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
                    "teams_url": "https://api.github.com/repos/octo-org/octo-repo/teams",
                    "hooks_url": "https://api.github.com/repos/octo-org/octo-repo/hooks",
                    "issue_events_url": "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
                    "events_url": "https://api.github.com/repos/octo-org/octo-repo/events",
                    "assignees_url": "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
                    "branches_url": "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
                    "tags_url": "https://api.github.com/repos/octo-org/octo-repo/tags",
                    "blobs_url": "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
                    "git_tags_url": "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
                    "git_refs_url": "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
                    "trees_url": "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
                    "statuses_url": "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
                    "languages_url": "https://api.github.com/repos/octo-org/octo-repo/languages",
                    "stargazers_url": "https://api.github.com/repos/octo-org/octo-repo/stargazers",
                    "contributors_url": "https://api.github.com/repos/octo-org/octo-repo/contributors",
                    "subscribers_url": "https://api.github.com/repos/octo-org/octo-repo/subscribers",
                    "subscription_url": "https://api.github.com/repos/octo-org/octo-repo/subscription",
                    "commits_url": "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
                    "git_commits_url": "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
                    "comments_url": "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
                    "issue_comment_url": "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
                    "compare_url": "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
                    "merges_url": "https://api.github.com/repos/octo-org/octo-repo/merges",
                    "archive_url": "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
                    "downloads_url": "https://api.github.com/repos/octo-org/octo-repo/downloads",
                    "issues_url": "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
                    "pulls_url": "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
                    "milestones_url": "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
                    "notifications_url": "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
                    "labels_url": "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
                    "releases_url": "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
                    "deployments_url": "https://api.github.com/repos/octo-org/octo-repo/deployments",
                },
            }
        )

        self.assertEqual(run.id, 1)
        self.assertEqual(run.name, "Build")
        self.assertEqual(run.node_id, "MDEyOldvcmtmbG93IFJ1bjI2OTI4OQ==")
        self.assertEqual(run.check_suite_id, 42)
        self.assertEqual(run.check_suite_node_id, "MDEwOkNoZWNrU3VpdGU0Mg==")
        self.assertEqual(run.head_branch, "master")
        self.assertEqual(
            run.head_sha, "acb5820ced9479c074f688cc328bf03f341a511d"
        )
        self.assertEqual(run.run_number, 562)
        self.assertEqual(run.event, Event.PUSH)
        self.assertEqual(run.status, WorkflowRunStatus.QUEUED)
        self.assertEqual(run.conclusion, None)
        self.assertEqual(run.workflow_id, 159038)
        self.assertEqual(
            run.url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642",
        )
        self.assertEqual(
            run.html_url,
            "https://github.com/octo-org/octo-repo/actions/runs/30433642",
        )
        self.assertEqual(run.pull_requests, [])
        self.assertEqual(
            run.created_at,
            datetime(2020, 1, 22, 19, 33, 8, tzinfo=timezone.utc),
        )
        self.assertEqual(
            run.updated_at,
            datetime(2020, 1, 22, 19, 33, 8, tzinfo=timezone.utc),
        )
        self.assertEqual(run.run_attempt, 1)
        self.assertEqual(
            run.run_started_at,
            datetime(2020, 1, 22, 19, 33, 8, tzinfo=timezone.utc),
        )
        self.assertEqual(
            run.jobs_url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/jobs",
        )
        self.assertEqual(
            run.logs_url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/logs",
        )
        self.assertEqual(
            run.check_suite_url,
            "https://api.github.com/repos/octo-org/octo-repo/check-suites/414944374",
        )
        self.assertEqual(
            run.artifacts_url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/artifacts",
        )
        self.assertEqual(
            run.cancel_url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/cancel",
        )
        self.assertEqual(
            run.rerun_url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/runs/30433642/rerun",
        )
        self.assertEqual(
            run.workflow_url,
            "https://api.github.com/repos/octo-org/octo-repo/actions/workflows/159038",
        )
        commit = run.head_commit
        self.assertEqual(commit.id, "acb5820ced9479c074f688cc328bf03f341a511d")
        self.assertEqual(
            commit.tree_id, "d23f6eedb1e1b9610bbc754ddb5197bfe7271223"
        )
        self.assertEqual(commit.message, "Create linter.yaml")
        self.assertEqual(
            commit.timestamp,
            datetime(2020, 1, 22, 19, 33, 5, tzinfo=timezone.utc),
        )
        self.assertEqual(commit.author.name, "Octo Cat")
        self.assertEqual(commit.author.email, "octocat@github.com")
        self.assertEqual(commit.committer.name, "GitHub")
        self.assertEqual(commit.committer.email, "noreply@github.com")

        user = run.actor
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertEqual(user.site_admin, False)

        user = run.triggering_actor
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertEqual(user.site_admin, False)

        repo = run.repository
        self.assertEqual(repo.id, 1296269)
        self.assertEqual(repo.node_id, "MDEwOlJlcG9zaXRvcnkxMjk2MjY5")
        self.assertEqual(repo.name, "Hello-World")
        self.assertEqual(repo.full_name, "octocat/Hello-World")
        self.assertFalse(repo.private)
        self.assertEqual(
            repo.html_url, "https://github.com/octocat/Hello-World"
        )
        self.assertEqual(repo.description, "This your first repo!")
        self.assertFalse(repo.fork)
        self.assertEqual(
            repo.url, "https://api.github.com/repos/octocat/Hello-World"
        )
        self.assertEqual(
            repo.archive_url,
            "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
        )
        self.assertEqual(
            repo.assignees_url,
            "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
        )
        self.assertEqual(
            repo.blobs_url,
            "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
        )
        self.assertEqual(
            repo.branches_url,
            "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
        )
        self.assertEqual(
            repo.collaborators_url,
            "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
        )
        self.assertEqual(
            repo.comments_url,
            "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
        )
        self.assertEqual(
            repo.commits_url,
            "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
        )
        self.assertEqual(
            repo.compare_url,
            "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
        )
        self.assertEqual(
            repo.contents_url,
            "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
        )
        self.assertEqual(
            repo.contributors_url,
            "https://api.github.com/repos/octocat/Hello-World/contributors",
        )
        self.assertEqual(
            repo.deployments_url,
            "https://api.github.com/repos/octocat/Hello-World/deployments",
        )
        self.assertEqual(
            repo.downloads_url,
            "https://api.github.com/repos/octocat/Hello-World/downloads",
        )
        self.assertEqual(
            repo.events_url,
            "https://api.github.com/repos/octocat/Hello-World/events",
        )
        self.assertEqual(
            repo.forks_url,
            "https://api.github.com/repos/octocat/Hello-World/forks",
        )
        self.assertEqual(
            repo.git_commits_url,
            "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
        )
        self.assertEqual(
            repo.git_refs_url,
            "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
        )
        self.assertEqual(
            repo.git_tags_url,
            "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
        )
        self.assertEqual(repo.git_url, "git:github.com/octocat/Hello-World.git")
        self.assertEqual(
            repo.issue_comment_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
        )
        self.assertEqual(
            repo.issue_events_url,
            "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
        )
        self.assertEqual(
            repo.issues_url,
            "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
        )
        self.assertEqual(
            repo.keys_url,
            "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
        )
        self.assertEqual(
            repo.labels_url,
            "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
        )
        self.assertEqual(
            repo.languages_url,
            "https://api.github.com/repos/octocat/Hello-World/languages",
        )
        self.assertEqual(
            repo.merges_url,
            "https://api.github.com/repos/octocat/Hello-World/merges",
        )
        self.assertEqual(
            repo.milestones_url,
            "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
        )
        self.assertEqual(
            repo.notifications_url,
            "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
        )
        self.assertEqual(
            repo.pulls_url,
            "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
        )
        self.assertEqual(
            repo.releases_url,
            "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
        )
        self.assertEqual(repo.ssh_url, "git@github.com:octocat/Hello-World.git")
        self.assertEqual(
            repo.stargazers_url,
            "https://api.github.com/repos/octocat/Hello-World/stargazers",
        )
        self.assertEqual(
            repo.statuses_url,
            "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
        )
        self.assertEqual(
            repo.subscribers_url,
            "https://api.github.com/repos/octocat/Hello-World/subscribers",
        )
        self.assertEqual(
            repo.subscription_url,
            "https://api.github.com/repos/octocat/Hello-World/subscription",
        )
        self.assertEqual(
            repo.tags_url,
            "https://api.github.com/repos/octocat/Hello-World/tags",
        )
        self.assertEqual(
            repo.teams_url,
            "https://api.github.com/repos/octocat/Hello-World/teams",
        )
        self.assertEqual(
            repo.trees_url,
            "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
        )
        self.assertEqual(
            repo.hooks_url,
            "http://api.github.com/repos/octocat/Hello-World/hooks",
        )

        user = repo.owner
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertEqual(user.site_admin, False)

        repo = run.head_repository
        self.assertEqual(repo.id, 217723378)
        self.assertEqual(repo.node_id, "MDEwOlJlcG9zaXRvcnkyMTc3MjMzNzg=")
        self.assertEqual(repo.name, "octo-repo")
        self.assertEqual(repo.full_name, "octo-org/octo-repo")
        self.assertTrue(repo.private)
        self.assertEqual(repo.html_url, "https://github.com/octo-org/octo-repo")
        self.assertIsNone(repo.description)
        self.assertFalse(repo.fork)
        self.assertEqual(
            repo.url, "https://api.github.com/repos/octo-org/octo-repo"
        )
        self.assertEqual(
            repo.forks_url,
            "https://api.github.com/repos/octo-org/octo-repo/forks",
        )
        self.assertEqual(
            repo.keys_url,
            "https://api.github.com/repos/octo-org/octo-repo/keys{/key_id}",
        )
        self.assertEqual(
            repo.collaborators_url,
            "https://api.github.com/repos/octo-org/octo-repo/collaborators{/collaborator}",
        )
        self.assertEqual(
            repo.teams_url,
            "https://api.github.com/repos/octo-org/octo-repo/teams",
        )
        self.assertEqual(
            repo.hooks_url,
            "https://api.github.com/repos/octo-org/octo-repo/hooks",
        )
        self.assertEqual(
            repo.issue_events_url,
            "https://api.github.com/repos/octo-org/octo-repo/issues/events{/number}",
        )
        self.assertEqual(
            repo.events_url,
            "https://api.github.com/repos/octo-org/octo-repo/events",
        )
        self.assertEqual(
            repo.assignees_url,
            "https://api.github.com/repos/octo-org/octo-repo/assignees{/user}",
        )
        self.assertEqual(
            repo.branches_url,
            "https://api.github.com/repos/octo-org/octo-repo/branches{/branch}",
        )
        self.assertEqual(
            repo.tags_url,
            "https://api.github.com/repos/octo-org/octo-repo/tags",
        )
        self.assertEqual(
            repo.blobs_url,
            "https://api.github.com/repos/octo-org/octo-repo/git/blobs{/sha}",
        )
        self.assertEqual(
            repo.git_tags_url,
            "https://api.github.com/repos/octo-org/octo-repo/git/tags{/sha}",
        )
        self.assertEqual(
            repo.git_refs_url,
            "https://api.github.com/repos/octo-org/octo-repo/git/refs{/sha}",
        )
        self.assertEqual(
            repo.trees_url,
            "https://api.github.com/repos/octo-org/octo-repo/git/trees{/sha}",
        )
        self.assertEqual(
            repo.statuses_url,
            "https://api.github.com/repos/octo-org/octo-repo/statuses/{sha}",
        )
        self.assertEqual(
            repo.languages_url,
            "https://api.github.com/repos/octo-org/octo-repo/languages",
        )
        self.assertEqual(
            repo.stargazers_url,
            "https://api.github.com/repos/octo-org/octo-repo/stargazers",
        )
        self.assertEqual(
            repo.contributors_url,
            "https://api.github.com/repos/octo-org/octo-repo/contributors",
        )
        self.assertEqual(
            repo.subscribers_url,
            "https://api.github.com/repos/octo-org/octo-repo/subscribers",
        )
        self.assertEqual(
            repo.subscription_url,
            "https://api.github.com/repos/octo-org/octo-repo/subscription",
        )
        self.assertEqual(
            repo.commits_url,
            "https://api.github.com/repos/octo-org/octo-repo/commits{/sha}",
        )
        self.assertEqual(
            repo.git_commits_url,
            "https://api.github.com/repos/octo-org/octo-repo/git/commits{/sha}",
        )
        self.assertEqual(
            repo.comments_url,
            "https://api.github.com/repos/octo-org/octo-repo/comments{/number}",
        )
        self.assertEqual(
            repo.issue_comment_url,
            "https://api.github.com/repos/octo-org/octo-repo/issues/comments{/number}",
        )
        self.assertEqual(
            repo.contents_url,
            "https://api.github.com/repos/octo-org/octo-repo/contents/{+path}",
        )
        self.assertEqual(
            repo.compare_url,
            "https://api.github.com/repos/octo-org/octo-repo/compare/{base}...{head}",
        )
        self.assertEqual(
            repo.merges_url,
            "https://api.github.com/repos/octo-org/octo-repo/merges",
        )
        self.assertEqual(
            repo.archive_url,
            "https://api.github.com/repos/octo-org/octo-repo/{archive_format}{/ref}",
        )
        self.assertEqual(
            repo.downloads_url,
            "https://api.github.com/repos/octo-org/octo-repo/downloads",
        )
        self.assertEqual(
            repo.issues_url,
            "https://api.github.com/repos/octo-org/octo-repo/issues{/number}",
        )
        self.assertEqual(
            repo.pulls_url,
            "https://api.github.com/repos/octo-org/octo-repo/pulls{/number}",
        )
        self.assertEqual(
            repo.milestones_url,
            "https://api.github.com/repos/octo-org/octo-repo/milestones{/number}",
        )
        self.assertEqual(
            repo.notifications_url,
            "https://api.github.com/repos/octo-org/octo-repo/notifications{?since,all,participating}",
        )
        self.assertEqual(
            repo.labels_url,
            "https://api.github.com/repos/octo-org/octo-repo/labels{/name}",
        )
        self.assertEqual(
            repo.releases_url,
            "https://api.github.com/repos/octo-org/octo-repo/releases{/id}",
        )
        self.assertEqual(
            repo.deployments_url,
            "https://api.github.com/repos/octo-org/octo-repo/deployments",
        )

        user = repo.owner
        self.assertEqual(user.login, "octocat")
        self.assertEqual(user.id, 1)
        self.assertEqual(user.node_id, "MDQ6VXNlcjE=")
        self.assertEqual(
            user.avatar_url, "https://github.com/images/error/octocat_happy.gif"
        )
        self.assertEqual(user.gravatar_id, "")
        self.assertEqual(user.url, "https://api.github.com/users/octocat")
        self.assertEqual(user.html_url, "https://github.com/octocat")
        self.assertEqual(
            user.followers_url, "https://api.github.com/users/octocat/followers"
        )
        self.assertEqual(
            user.following_url,
            "https://api.github.com/users/octocat/following{/other_user}",
        )
        self.assertEqual(
            user.gists_url,
            "https://api.github.com/users/octocat/gists{/gist_id}",
        )
        self.assertEqual(
            user.starred_url,
            "https://api.github.com/users/octocat/starred{/owner}{/repo}",
        )
        self.assertEqual(
            user.subscriptions_url,
            "https://api.github.com/users/octocat/subscriptions",
        )
        self.assertEqual(
            user.organizations_url, "https://api.github.com/users/octocat/orgs"
        )
        self.assertEqual(
            user.repos_url, "https://api.github.com/users/octocat/repos"
        )
        self.assertEqual(
            user.events_url,
            "https://api.github.com/users/octocat/events{/privacy}",
        )
        self.assertEqual(
            user.received_events_url,
            "https://api.github.com/users/octocat/received_events",
        )
        self.assertEqual(user.type, "User")
        self.assertEqual(user.site_admin, False)
pontos-25.3.2/tests/github/script/000077500000000000000000000000001476255566300171005ustar00rootroot00000000000000pontos-25.3.2/tests/github/script/__init__.py000066400000000000000000000001411476255566300212050ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/github/script/test_load.py000066400000000000000000000067411476255566300214400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import io
import unittest
from argparse import ArgumentParser
from contextlib import redirect_stderr

from pontos.github.script.errors import GitHubScriptError
from pontos.github.script.load import (
    load_script,
    run_add_arguments_function,
    run_github_script_function,
)
from pontos.testing import temp_file, temp_python_module
from tests import IsolatedAsyncioTestCase


class LoadScriptTestCase(unittest.TestCase):
    """Tests for loading GitHub scripts from files and module names."""

    def test_load_script(self):
        """A script file can be loaded and its functions called."""
        with temp_file("def foo():\n\treturn 1", name="foo.py") as script_file:
            with load_script(script_file) as loaded:
                self.assertIsNotNone(loaded)
                self.assertIsNotNone(loaded.foo)
                self.assertEqual(loaded.foo(), 1)

    def test_load_script_module(self):
        """A script can also be loaded by its importable module name."""
        with temp_python_module(
            "def foo():\n\treturn 1", name="github-foo-script"
        ):
            with load_script("github-foo-script") as loaded:
                self.assertIsNotNone(loaded)
                self.assertIsNotNone(loaded.foo)
                self.assertEqual(loaded.foo(), 1)

    def test_load_script_failure(self):
        """Loading a non-existing script raises ModuleNotFoundError."""
        with self.assertRaisesRegex(
            ModuleNotFoundError, "No module named 'baz'"
        ):
            with load_script("foo/bar/baz.py"):
                pass


class RunAddArgumentsFunction(unittest.TestCase):
    """Tests for running a script's ``add_script_arguments`` hook."""

    def test_add_arguments_function(self):
        """The script's hook extends the parser with its own arguments."""
        script = (
            "def add_script_arguments(parser):\n\t"
            "parser.add_argument('--foo')"
        )
        with temp_file(script, name="foo.py") as script_file:
            with load_script(script_file) as loaded:
                arg_parser = ArgumentParser()
                run_add_arguments_function(loaded, arg_parser)
                parsed = arg_parser.parse_args(["--foo", "123"])
                self.assertEqual(parsed.foo, "123")

    def test_no_add_arguments_function(self):
        """Without the hook, unknown arguments make the parser exit."""
        with temp_file("def foo():\n\tpass", name="foo.py") as script_file:
            with load_script(script_file) as loaded:
                arg_parser = ArgumentParser()
                run_add_arguments_function(loaded, arg_parser)
                # argparse reports the error on stderr and exits; swallow
                # the output to keep the test run quiet.
                with redirect_stderr(io.StringIO()):
                    with self.assertRaises(SystemExit):
                        arg_parser.parse_args(["--foo", "123"])


class RunGithubScriptFunctionTestCase(IsolatedAsyncioTestCase):
    """Tests for executing a script's ``github_script`` entry point."""

    def test_run_async_github_script_function(self):
        """An async ``github_script`` is executed and its value returned."""
        with temp_file(
            "async def github_script(api, args):\n\treturn 1",
            name="foo.py",
        ) as script_file:
            with load_script(script_file) as loaded:
                result = run_github_script_function(loaded, "123", 123, {})
                self.assertEqual(result, 1)

    def test_sync_github_script_function_not_supported(self):
        """A synchronous ``github_script`` is rejected with an error."""
        with temp_file(
            "def github_script(api, args):\n\treturn 1",
            name="foo.py",
        ) as script_file:
            with load_script(script_file) as loaded:
                with self.assertRaises(GitHubScriptError):
                    run_github_script_function(loaded, "123", 123, {})

    def test_no_github_script_function(self):
        """A script without ``github_script`` raises GitHubScriptError."""
        with temp_file(
            "def foo():\n\tpass",
            name="foo.py",
        ) as script_file:
            with load_script(script_file) as loaded:
                with self.assertRaises(GitHubScriptError):
                    run_github_script_function(loaded, "123", 123, {})
pontos-25.3.2/tests/github/script/test_parser.py000066400000000000000000000026321476255566300220100ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from unittest.mock import patch

from pontos.github.api.helper import DEFAULT_TIMEOUT
from pontos.github.script._parser import create_parser


class CreateParserTestCase(unittest.TestCase):
    """Tests for the GitHub script command line argument parser."""

    def test_create_parser(self):
        """Token, timeout and script name are parsed from the CLI."""
        cli_parser = create_parser()
        parsed = cli_parser.parse_args(
            ["--token", "123", "--timeout", "456", "my_script"]
        )

        self.assertEqual(parsed.token, "123")
        self.assertEqual(parsed.timeout, 456)
        self.assertEqual(parsed.script, "my_script")

    @patch.dict("os.environ", {"GITHUB_TOKEN": "987"})
    def test_parse_token(self):
        """GITHUB_TOKEN supplies the default token; --token overrides it."""
        cli_parser = create_parser()

        parsed = cli_parser.parse_args(["my_script"])
        self.assertEqual(parsed.token, "987")
        self.assertEqual(parsed.script, "my_script")

        parsed = cli_parser.parse_args(["--token", "123", "my_script"])
        self.assertEqual(parsed.token, "123")
        self.assertEqual(parsed.script, "my_script")

    def test_parse_timeout(self):
        """Timeout defaults to DEFAULT_TIMEOUT; --timeout overrides it."""
        cli_parser = create_parser()

        parsed = cli_parser.parse_args(["my_script"])
        self.assertEqual(parsed.script, "my_script")
        self.assertEqual(parsed.timeout, DEFAULT_TIMEOUT)

        parsed = cli_parser.parse_args(["--timeout", "666", "my_script"])
        self.assertEqual(parsed.timeout, 666)
        self.assertEqual(parsed.script, "my_script")
pontos-25.3.2/tests/github/test_cmds.py000066400000000000000000000213761476255566300201440ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=no-member

from argparse import Namespace
from pathlib import Path
from unittest.mock import MagicMock, call, patch

from pontos.github.cmds import (
    create_pull_request,
    create_release,
    create_tag,
    file_status,
    labels,
    repos,
    update_pull_request,
)
from pontos.github.models.base import FileStatus
from pontos.github.models.organization import RepositoryType
from pontos.testing import temp_file
from tests import AsyncIteratorMock, IsolatedAsyncioTestCase

# Directory containing this test module; used to build the expected
# absolute paths asserted against the file-status output below.
here = Path(__file__).parent


class TestCmds(IsolatedAsyncioTestCase):
    """Tests for the async ``pontos-github`` CLI command handlers.

    Each test patches the GitHub REST API class the command uses and
    verifies the exact (awaited) API calls made for a given Namespace of
    parsed arguments.
    """

    @patch("pontos.github.api.api.GitHubAsyncRESTPullRequests", spec=True)
    async def test_file_status(self, api_mock):
        """file_status writes the PR's changed file paths to the output file."""
        terminal = MagicMock()
        api_mock.return_value.exists.return_value = True
        # Mocked mapping of file status -> changed paths returned by the API.
        api_mock.return_value.files.return_value = {
            FileStatus.ADDED: [Path("tests/github/foo/bar")],
            FileStatus.MODIFIED: [
                Path("tests/github/bar/baz"),
                Path("tests/github/baz/boo"),
            ],
        }

        with (
            temp_file(name="some.file") as test_file,
            test_file.open("w", encoding="utf8") as output,
        ):
            args = Namespace(
                repo="foo/bar",
                pull_request=8,
                output=output,
                status=[FileStatus.ADDED, FileStatus.MODIFIED],
                token="GITHUB_TOKEN",
            )

            await file_status(terminal, args)

            api_mock.return_value.exists.assert_awaited_once_with(
                repo="foo/bar", pull_request=8
            )
            api_mock.return_value.files.assert_awaited_once_with(
                repo="foo/bar",
                pull_request=8,
                status_list=[FileStatus.ADDED, FileStatus.MODIFIED],
            )

            output.flush()

            # Paths are resolved relative to this test directory (``here``)
            # and written one per line.
            content = test_file.read_text(encoding="utf-8")
            self.assertEqual(
                content, f"{here}/foo/bar\n{here}/bar/baz\n{here}/baz/boo\n"
            )

    @patch("pontos.github.api.api.GitHubAsyncRESTReleases", spec=True)
    async def test_create_release_no_tag(self, api_mock):
        """create_release exits with code 1 when the tag already exists."""
        terminal = MagicMock()
        api_mock.return_value.exists.return_value = True

        args = Namespace(
            repo="foo/bar",
            tag="test_tag",
            name="test_release",
            body=None,
            target_commitish=None,
            draft=False,
            prerelease=False,
            token="GITHUB_TOKEN",
        )

        with self.assertRaises(SystemExit) as syse:
            await create_release(terminal, args)

        self.assertEqual(syse.exception.code, 1)

        api_mock.return_value.exists.assert_awaited_once_with(
            repo="foo/bar", tag="test_tag"
        )

    @patch("pontos.github.api.api.GitHubAsyncRESTReleases", spec=True)
    async def test_create_release(self, api_mock):
        """create_release creates the release when the tag does not exist."""
        terminal = MagicMock()
        api_mock.return_value.exists.return_value = False
        api_mock.return_value.create.return_value = True

        args = Namespace(
            repo="foo/bar",
            tag="test_tag",
            name="test_release",
            body=None,
            target_commitish=None,
            draft=False,
            prerelease=False,
            token="GITHUB_TOKEN",
        )

        await create_release(terminal, args)

        api_mock.return_value.exists.assert_awaited_once_with(
            repo="foo/bar", tag="test_tag"
        )
        api_mock.return_value.create.assert_awaited_once_with(
            repo="foo/bar",
            tag="test_tag",
            body=None,
            name="test_release",
            target_commitish=None,
            draft=False,
            prerelease=False,
        )

    @patch("pontos.github.api.api.GitHubAsyncRESTTags", spec=True)
    async def test_create_tag(self, api_mock):
        """create_tag creates a tag object and then its git reference."""
        terminal = MagicMock()
        # The created tag's sha is reused for the tag reference below.
        api_mock.return_value.create.return_value = MagicMock(sha="123")
        api_mock.return_value.create_tag_reference.return_value = True

        args = Namespace(
            repo="foo/bar",
            tag="test_tag",
            name="test_release",
            message="test msg",
            git_object="commit",
            git_object_type=None,
            email="test@test.test",
            date=None,
            token="GITHUB_TOKEN",
        )

        await create_tag(terminal, args)

        api_mock.return_value.create.assert_awaited_once_with(
            repo="foo/bar",
            tag="test_tag",
            message="test msg",
            git_object="commit",
            name="test_release",
            email="test@test.test",
            git_object_type=None,
            date=None,
        )
        api_mock.return_value.create_tag_reference.assert_awaited_once_with(
            repo="foo/bar", tag="test_tag", sha="123"
        )

    @patch("pontos.github.api.api.GitHubAsyncRESTBranches", spec=True)
    @patch("pontos.github.api.api.GitHubAsyncRESTPullRequests", spec=True)
    async def test_create_pull_request(
        self,
        pulls_api_mock,
        branches_api_mock,
    ):
        """create_pull_request checks both branches and then opens the PR."""
        terminal = MagicMock()
        # NOTE(review): this sets a constant *return_value* (a truthy list),
        # not per-call results — ``side_effect=[False, False]`` was likely
        # intended. Confirm against the expectations of create_pull_request.
        branches_api_mock.return_value.exists.return_value = [False, False]
        pulls_api_mock.return_value.create.return_value = True

        args = Namespace(
            repo="foo/bar",
            head="some-branch",
            target="main",
            title="foo",
            body="foo bar",
            token="GITHUB_TOKEN",
        )

        await create_pull_request(terminal, args)

        branches_api_mock.return_value.exists.assert_has_awaits(
            [
                call(repo="foo/bar", branch="some-branch"),
                call(repo="foo/bar", branch="main"),
            ]
        )

        pulls_api_mock.return_value.create.assert_awaited_once_with(
            repo="foo/bar",
            head_branch="some-branch",
            base_branch="main",
            title="foo",
            body="foo bar",
        )

    @patch("pontos.github.api.api.GitHubAsyncRESTBranches", spec=True)
    @patch("pontos.github.api.api.GitHubAsyncRESTPullRequests", spec=True)
    async def test_update_pull_request(
        self,
        pulls_api_mock,
        branches_api_mock,
    ):
        """update_pull_request verifies the target branch and updates the PR."""
        terminal = MagicMock()
        branches_api_mock.return_value.exists.return_value = True
        pulls_api_mock.return_value.create.return_value = True

        args = Namespace(
            repo="foo/bar",
            target="main",
            pull_request=9,
            title="foo",
            body="foo bar",
            token="GITHUB_TOKEN",
        )

        await update_pull_request(terminal, args)

        branches_api_mock.return_value.exists.assert_awaited_once_with(
            repo="foo/bar", branch="main"
        )

        pulls_api_mock.return_value.update.assert_awaited_once_with(
            repo="foo/bar",
            pull_request=9,
            base_branch="main",
            title="foo",
            body="foo bar",
        )

    @patch("pontos.github.api.api.GitHubAsyncRESTPullRequests", spec=True)
    @patch("pontos.github.api.api.GitHubAsyncRESTLabels", spec=True)
    async def test_labels(
        self,
        labels_api_mock,
        pulls_api_mock,
    ):
        """labels merges the existing issue labels with the requested ones."""
        terminal = MagicMock()
        pulls_api_mock.return_value.exists.return_value = True
        # Existing labels are provided via an async iterator, as the real
        # API returns an async generator.
        labels_api_mock.return_value.get_all.return_value = AsyncIteratorMock(
            ["foo", "bar"]
        )

        args = Namespace(
            repo="foo/bar",
            issue=9,
            labels=["baz"],
            token="GITHUB_TOKEN",
        )

        await labels(terminal, args)

        pulls_api_mock.return_value.exists.assert_awaited_once_with(
            repo="foo/bar", pull_request=9
        )

        labels_api_mock.return_value.get_all.assert_called_once_with(
            repo="foo/bar",
            issue=9,
        )

        labels_api_mock.return_value.set_all.assert_awaited_once_with(
            repo="foo/bar", issue=9, labels=["foo", "bar", "baz"]
        )

    @patch("pontos.github.api.api.GitHubAsyncRESTOrganizations", spec=True)
    async def test_repos(self, api_mock):
        """repos checks the organization exists and lists its repositories."""
        terminal = MagicMock()
        api_mock.return_value.exists.return_value = True
        api_mock.return_value.get_repositories.return_value = AsyncIteratorMock(
            ["repo1", "repo2"]
        )

        args = Namespace(
            orga="foo",
            repo="bar",
            path=None,
            type=RepositoryType.PUBLIC,
            token="GITHUB_TOKEN",
        )

        await repos(terminal, args)

        api_mock.return_value.exists.assert_awaited_once_with("foo")
        api_mock.return_value.get_repositories.assert_called_once_with(
            organization="foo", repository_type=RepositoryType.PUBLIC
        )
pontos-25.3.2/tests/github/test_parser.py000066400000000000000000000131661476255566300205100ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=no-member

import io
import unittest
from argparse import Namespace
from contextlib import redirect_stderr
from pathlib import Path
from unittest.mock import Mock, patch

from pontos.github._parser import parse_args
from pontos.github.cmds import (
    create_pull_request,
    create_release,
    create_tag,
    file_status,
    update_pull_request,
)
from pontos.github.models.base import FileStatus
from pontos.testing import temp_directory


class TestArgparsing(unittest.TestCase):
    """Tests for the ``pontos-github`` command line argument parsing."""

    def setUp(self):
        # Shared terminal mock; not used directly by these parsing tests.
        self.term = Mock()

    def test_create_pr_parse_args(self):
        """``pr create`` fills the body from the bundled PR template."""
        argv = [
            "pr",
            "create",
            "foo/bar",
            "baz",
            "main",
            "baz in main",
        ]
        # Clear the environment so no real GITHUB_TOKEN leaks in.
        with patch.dict("os.environ", {}, clear=True):
            parsed_args = parse_args(argv)

        # Default PR body comes from this template file in the repo.
        template = Path().cwd() / "pontos/github/pr_template.md"

        self.assertEqual(parsed_args.command, "pr")
        self.assertEqual(parsed_args.token, "GITHUB_TOKEN")
        self.assertEqual(parsed_args.body, template.read_text(encoding="utf-8"))
        self.assertEqual(parsed_args.pr_func, create_pull_request)
        self.assertEqual(parsed_args.repo, "foo/bar")
        self.assertEqual(parsed_args.target, "main")
        self.assertEqual(parsed_args.title, "baz in main")

    def test_create_pr_parse_args_fail(self):
        """``pr create`` without required positionals exits with an error."""
        argv = ["pr", "create", "foo/bar"]

        # redirect_stderr keeps argparse's usage message out of test output.
        with self.assertRaises(SystemExit), redirect_stderr(io.StringIO()):
            parse_args(argv)

    def test_update_pr_parse_args(self):
        """``pr update`` parses options plus global flags (-q, --log-file)."""
        argv = [
            "-q",
            "--log-file",
            "foo",
            "pr",
            "update",
            "foo/bar",
            "123",
            "--body",
            "foo",
            "--target",
            "main",
            "--title",
            "baz in main",
        ]

        with patch.dict("os.environ", {}, clear=True):
            parsed_args = parse_args(argv)

        self.assertEqual(parsed_args.command, "pr")
        self.assertEqual(parsed_args.token, "GITHUB_TOKEN")
        self.assertEqual(parsed_args.body, "foo")
        self.assertEqual(parsed_args.pr_func, update_pull_request)
        self.assertEqual(parsed_args.repo, "foo/bar")
        self.assertEqual(parsed_args.pull_request, 123)
        self.assertEqual(parsed_args.target, "main")
        self.assertEqual(parsed_args.title, "baz in main")
        self.assertTrue(parsed_args.quiet)
        self.assertEqual(parsed_args.log_file, "foo")

    def test_update_pr_parse_args_fail(self):
        """``pr update`` without a pull request number exits with an error."""
        argv = ["pr", "update", "foo/bar"]

        with self.assertRaises(SystemExit), redirect_stderr(io.StringIO()):
            parse_args(argv)

    def test_fs_parse_args(self):
        """``FS`` parses repo, PR number and opens the -o output file."""
        with temp_directory(change_into=True) as tmp_dir:
            argv = [
                "FS",
                "foo/bar",
                "8",
                "-o",
                "some.file",
            ]

            with patch.dict("os.environ", {}, clear=True):
                parsed_args = parse_args(argv)

            # NOTE(review): opened without a context manager; closed manually
            # below together with the parser-opened file object.
            output = open(tmp_dir / "some.file", mode="w", encoding="utf-8")

            expected_args = Namespace(
                command="FS",
                func=file_status,
                repo="foo/bar",
                pull_request=8,
                output=output,
                status=[FileStatus.ADDED, FileStatus.MODIFIED],
                token="GITHUB_TOKEN",
            )

            # File objects can't be compared for equality, so only the type
            # of the parsed output is checked.
            self.assertEqual(
                type(parsed_args.output), type(expected_args.output)
            )
            self.assertEqual(parsed_args.command, expected_args.command)
            self.assertEqual(parsed_args.func, expected_args.func)
            self.assertEqual(parsed_args.repo, expected_args.repo)
            self.assertEqual(
                parsed_args.pull_request, expected_args.pull_request
            )
            self.assertEqual(parsed_args.status, expected_args.status)
            self.assertEqual(parsed_args.token, expected_args.token)

            output.close()
            parsed_args.output.close()

    def test_create_release_parse_args(self):
        """``re create`` parses tag, name, body and release defaults."""
        argv = [
            "re",
            "create",
            "foo/bar",
            "123",
            "release_name",
            "--body",
            "foo",
        ]

        with patch.dict("os.environ", {}, clear=True):
            parsed_args = parse_args(argv)

        self.assertEqual(parsed_args.command, "re")
        self.assertEqual(parsed_args.token, "GITHUB_TOKEN")
        self.assertEqual(parsed_args.re_func, create_release)
        self.assertEqual(parsed_args.repo, "foo/bar")
        self.assertEqual(parsed_args.tag, "123")
        self.assertEqual(parsed_args.name, "release_name")
        self.assertEqual(parsed_args.body, "foo")
        self.assertEqual(parsed_args.target_commitish, None)
        self.assertFalse(parsed_args.draft)
        self.assertFalse(parsed_args.prerelease)

    def test_create_tag_parse_args(self):
        """``tag create`` parses all positional tag arguments."""
        argv = [
            "tag",
            "create",
            "foo/bar",
            "v1",
            "test user",
            "",
            "sha",
            "test@test.test",
        ]

        parsed_args = parse_args(argv)

        self.assertEqual(parsed_args.command, "tag")
        self.assertEqual(parsed_args.tag_func, create_tag)
        self.assertEqual(parsed_args.repo, "foo/bar")
        self.assertEqual(parsed_args.tag, "v1")
        self.assertEqual(parsed_args.message, "")
        self.assertEqual(parsed_args.git_object, "sha")
        self.assertEqual(parsed_args.email, "test@test.test")
pontos-25.3.2/tests/models/000077500000000000000000000000001476255566300155755ustar00rootroot00000000000000pontos-25.3.2/tests/models/__init__.py000066400000000000000000000001411476255566300177020ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/models/test_models.py000066400000000000000000000150231476255566300204720ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=no-member, disallowed-name

import unittest
from dataclasses import dataclass, field
from datetime import date, datetime, timedelta, timezone
from typing import Dict, List, Optional, Union

from pontos.models import Model, ModelAttribute, ModelError, dotted_attributes


class DottedAttributesTestCase(unittest.TestCase):
    """Tests for dotted_attributes, which exposes dict entries as attributes."""

    def test_with_new_class(self):
        class Foo:
            pass

        attributes = {"bar": 123, "hello": "World", "baz": [1, 2, 3]}
        instance = dotted_attributes(Foo(), attributes)

        self.assertEqual(instance.bar, 123)
        self.assertEqual(instance.baz, [1, 2, 3])
        self.assertEqual(instance.hello, "World")

    def test_with_github_model_attribute(self):
        attributes = {"bar": 123, "hello": "World", "baz": [1, 2, 3]}
        instance = dotted_attributes(ModelAttribute(), attributes)

        self.assertEqual(instance.bar, 123)
        self.assertEqual(instance.baz, [1, 2, 3])
        self.assertEqual(instance.hello, "World")


class ModelTestCase(unittest.TestCase):
    """Tests for the generic ``Model.from_dict`` factory."""

    def test_from_dict(self):
        payload = {
            "x": 1,
            "y": 2,
            "hello": "World",
            "baz": [1, 2, 3],
            "bar": {"a": "b"},
        }

        model = Model.from_dict(payload)

        self.assertEqual(model.x, 1)
        self.assertEqual(model.y, 2)
        self.assertEqual(model.hello, "World")
        self.assertEqual(model.baz, [1, 2, 3])
        # Nested dicts become attribute-accessible objects.
        self.assertEqual(model.bar.a, "b")

    def test_from_dict_failure(self):
        # Anything that is not a dict must be rejected.
        with self.assertRaisesRegex(
            ValueError, "Invalid data for creating an instance of.*"
        ):
            Model.from_dict("foo")


class ExampleModelTestCase(unittest.TestCase):
    """Tests for deserializing dataclass-based models via ``from_dict``.

    Each test declares a small local model to exercise one typing feature
    (Optional, List, datetime, date, Union, nested models, errors).
    """

    def test_optional(self):
        """Missing optional fields stay at their default (None)."""
        @dataclass
        class OtherModel(Model):
            something: str

        @dataclass
        class ExampleModel(Model):
            foo: str
            bar: Optional[OtherModel] = None

        model = ExampleModel.from_dict({"foo": "abc"})

        self.assertIsNone(model.bar)

    def test_list(self):
        """List fields are deserialized from JSON arrays."""
        @dataclass
        class ExampleModel(Model):
            foo: List[str]

        model = ExampleModel.from_dict({"foo": ["a", "b", "c"]})
        self.assertEqual(model.foo, ["a", "b", "c"])

    def test_list_with_default(self):
        """A missing list field falls back to its default_factory value."""
        @dataclass
        class ExampleModel(Model):
            foo: List[str] = field(default_factory=list)

        model = ExampleModel.from_dict({})
        self.assertEqual(model.foo, [])

    def test_datetime(self):
        """ISO timestamps are parsed; naive values are treated as UTC."""
        @dataclass
        class ExampleModel(Model):
            foo: datetime

        model = ExampleModel.from_dict({"foo": "1988-10-01T04:00:00.000"})
        self.assertEqual(
            model.foo, datetime(1988, 10, 1, 4, tzinfo=timezone.utc)
        )

        model = ExampleModel.from_dict({"foo": "1988-10-01T04:00:00Z"})
        self.assertEqual(
            model.foo, datetime(1988, 10, 1, 4, tzinfo=timezone.utc)
        )

        model = ExampleModel.from_dict({"foo": "1988-10-01T04:00:00+00:00"})
        self.assertEqual(
            model.foo, datetime(1988, 10, 1, 4, tzinfo=timezone.utc)
        )

        # An explicit offset is preserved rather than normalized to UTC.
        model = ExampleModel.from_dict({"foo": "1988-10-01T04:00:00+01:00"})
        self.assertEqual(
            model.foo,
            datetime(1988, 10, 1, 4, tzinfo=timezone(timedelta(hours=1))),
        )

        model = ExampleModel.from_dict({"foo": "2021-06-06T11:15:10.213"})
        self.assertEqual(
            model.foo,
            datetime(2021, 6, 6, 11, 15, 10, 213000, tzinfo=timezone.utc),
        )

    def test_date(self):
        """ISO date strings become ``date`` objects."""
        @dataclass
        class ExampleModel(Model):
            foo: date

        model = ExampleModel.from_dict({"foo": "1988-10-01"})

        self.assertEqual(model.foo, date(1988, 10, 1))

    def test_union(self):
        """Union fields keep the incoming value's type."""
        @dataclass
        class ExampleModel(Model):
            foo: Union[str, int]

        model = ExampleModel.from_dict({"foo": "123"})

        self.assertEqual(model.foo, "123")

        model = ExampleModel.from_dict({"foo": 123})
        self.assertEqual(model.foo, 123)

    def test_other_model(self):
        """Nested dicts are deserialized into the nested model type."""
        @dataclass
        class OtherModel(Model):
            bar: str

        @dataclass
        class ExampleModel(Model):
            foo: Optional[OtherModel] = None

        model = ExampleModel.from_dict({"foo": {"bar": "baz"}})
        self.assertEqual(model.foo.bar, "baz")

    def test_all(self):
        """All supported field kinds deserialize together."""
        @dataclass
        class OtherModel(Model):
            something: str

        @dataclass
        class ExampleModel(Model):
            foo: str
            bar: datetime
            id: Union[str, int]
            baz: List[str] = field(default_factory=list)
            ipsum: Optional[OtherModel] = None

        model = ExampleModel.from_dict(
            {
                "foo": "abc",
                "bar": "1988-10-01T04:00:00.000",
                "id": 123,
                "baz": ["a", "b", "c"],
                "ipsum": {"something": "def"},
            }
        )

        self.assertEqual(model.foo, "abc")
        self.assertEqual(
            model.bar, datetime(1988, 10, 1, 4, tzinfo=timezone.utc)
        )
        self.assertEqual(model.id, 123)
        self.assertEqual(model.baz, ["a", "b", "c"])
        self.assertIsNotNone(model.ipsum)
        self.assertEqual(model.ipsum.something, "def")

    def test_list_with_dict(self):
        """``List[Dict]`` items are kept as plain dicts, not converted."""
        @dataclass
        class ExampleModel(Model):
            foo: List[Dict]

        model = ExampleModel.from_dict({"foo": [{"a": 1}, {"b": 2}, {"c": 3}]})
        self.assertEqual(model.foo, [{"a": 1}, {"b": 2}, {"c": 3}])

    def test_model_error(self):
        """A dict for a plain str field raises a descriptive ModelError."""
        @dataclass
        class ExampleModel(Model):
            foo: Optional[str] = None

        with self.assertRaisesRegex(
            ModelError,
            "Error while creating ExampleModel model. Could not set value for "
            "property 'foo' from '{'bar': 'baz'}'.",
        ):
            ExampleModel.from_dict({"foo": {"bar": "baz"}})

    def test_model_error_2(self):
        """A plain string for a nested model field raises a ModelError."""
        @dataclass
        class OtherModel(Model):
            something: str

        @dataclass
        class ExampleModel(Model):
            foo: Optional[OtherModel]

        with self.assertRaisesRegex(
            ModelError,
            "Error while creating ExampleModel model. Could not set value for "
            "property 'foo' from 'abc'.",
        ):
            ExampleModel.from_dict(
                {
                    "foo": "abc",
                }
            )
pontos-25.3.2/tests/models/test_strenum.py000066400000000000000000000011661476255566300207070ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

import unittest

from pontos.models import StrEnum


class FooEnum(StrEnum):
    """Minimal StrEnum fixture used by StrEnumTestCase."""

    A = "a"
    B = "b"


class StrEnumTestCase(unittest.TestCase):
    """Ensure StrEnum members behave like plain strings."""

    def test_str(self):
        # str() yields the member's value, not the member's name.
        for member, expected in ((FooEnum.A, "a"), (FooEnum.B, "b")):
            self.assertEqual(str(member), expected)

    def test_str_append(self):
        # Members concatenate with ordinary strings.
        for member, expected in ((FooEnum.A, "say a"), (FooEnum.B, "say b")):
            self.assertEqual("say " + member, expected)

    def test_f_string(self):
        # Members interpolate as their value in f-strings.
        for member, expected in ((FooEnum.A, "say a"), (FooEnum.B, "say b")):
            self.assertEqual(f"say {member}", expected)
pontos-25.3.2/tests/nvd/000077500000000000000000000000001476255566300151015ustar00rootroot00000000000000pontos-25.3.2/tests/nvd/__init__.py000066400000000000000000000121121476255566300172070ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# pylint: disable=line-too-long

from typing import Any, Dict, Optional


def get_cve_data(data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Return example CVE API response data.

    Args:
        data: Optional overrides merged on top of the default fixture.

    Returns:
        A fresh dict with the fixture values, updated with ``data``.
    """
    base: Dict[str, Any] = {
        "id": "CVE-2022-45536",
        "source_identifier": "cve@mitre.org",
        "published": "2022-11-22T21:15:11.103",
        "last_modified": "2022-11-23T16:02:07.367",
        "vuln_status": "Analyzed",
        "descriptions": [
            {
                "lang": "en",
                "value": "AeroCMS v0.0.1 was discovered to contain a SQL "
                "Injection vulnerability via the id parameter at "
                "\\admin\\post_comments.php. This vulnerability allows "
                "attackers to access database information.",
            }
        ],
        "references": [
            {
                "url": "https://github.com/rdyx0/CVE/blob/master/AeroCMS/AeroCMS-v0.0.1-SQLi/post_comments_sql_injection/post_comments_sql_injection.md",
                "source": "cve@mitre.org",
                "tags": ["Exploit", "Third Party Advisory"],
            },
            {
                "url": "https://rdyx0.github.io/2018/09/07/AeroCMS-v0.0.1-SQLi%20post_comments_sql_injection/",
                "source": "cve@mitre.org",
                "tags": ["Exploit", "Third Party Advisory"],
            },
        ],
    }
    return {**base, **(data or {})}


def get_cpe_data(update: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Return example CPE API response data.

    Args:
        update: Optional overrides merged on top of the default fixture.

    Returns:
        A fresh dict with the fixture values, updated with ``update``.
    """
    base: Dict[str, Any] = {
        "deprecated": False,
        "cpe_name": "cpe:2.3:o:microsoft:windows_10_22h2:-:*:*:*:*:*:arm64:*",
        "cpe_name_id": "9BAECDB2-614D-4E9C-9936-190C30246F03",
        "last_modified": "2022-12-09T18:15:16.973",
        "created": "2022-12-09T16:20:06.943",
    }
    return {**base, **(update or {})}


def get_cpe_match_data(
    update: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Return example CPE match API response data.

    Args:
        update: Optional overrides merged on top of the default fixture.

    Returns:
        A fresh dict with the fixture values, updated with ``update``.
    """
    base: Dict[str, Any] = {
        "cpe_last_modified": "2019-07-22T16:37:38.133",
        "created": "2019-06-17T09:16:33.960",
        "criteria": "cpe:2.3:a:sun:jre:*:update3:*:*:*:*:*:*",
        "last_modified": "2019-06-17T09:16:44.000",
        "match_criteria_id": "EAB2C9C2-F685-450B-9980-553966FC3B63",
        "matches": [
            {
                "cpe_name": "cpe:2.3:a:sun:jre:1.3.0:update3:*:*:*:*:*:*",
                "cpe_name_id": "2D284534-DA21-43D5-9D89-07F19AE400EA",
            },
            {
                "cpe_name": "cpe:2.3:a:sun:jre:1.4.1:update3:*:*:*:*:*:*",
                "cpe_name_id": "CE55E1DF-8EA2-41EA-9C51-1BAE728CA094",
            },
            {
                "cpe_name": "cpe:2.3:a:sun:jre:1.4.2:update3:*:*:*:*:*:*",
                "cpe_name_id": "A09C4E47-6548-40C5-8458-5C07C3292C86",
            },
            {
                "cpe_name": "cpe:2.3:a:sun:jre:1.5.0:update3:*:*:*:*:*:*",
                "cpe_name_id": "C484A93A-2677-4501-A6E0-E4ADFFFB549E",
            },
            {
                "cpe_name": "cpe:2.3:a:sun:jre:1.6.0:update3:*:*:*:*:*:*",
                "cpe_name_id": "C518A954-369E-453E-8E17-2AF639150115",
            },
        ],
        "status": "Active",
        "version_end_including": "1.6.0",
    }
    return {**base, **(update or {})}


def get_cve_change_data(
    data: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Return example CVE change history API response data.

    Args:
        data: Optional overrides merged on top of the default fixture.

    Returns:
        A fresh dict with the fixture values, updated with ``data``.
    """
    base: Dict[str, Any] = {
        "cve_id": "CVE-2022-0001",
        "event_name": "Initial Analysis",
        "cve_change_id": "5160FDEB-0FF0-457B-AA36-0AEDCAB2522E",
        "source_identifier": "nvd@nist.gov",
        "created": "2022-03-18T20:13:08.123",
        "details": [
            {
                "action": "Added",
                "type": "CVSS V2",
                "new_value": "NIST (AV:L/AC:L/Au:N/C:P/I:N/A:N)",
            },
            {
                "action": "Added",
                "type": "CVSS V3.1",
                "new_value": "NIST AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:N/A:N",
            },
            {
                "action": "Changed",
                "type": "Reference Type",
                "old_value": "http://www.openwall.com/lists/oss-security/2022/03/18/2 No Types Assigned",
                "new_value": "http://www.openwall.com/lists/oss-security/2022/03/18/2 Mailing List, Third Party Advisory",
            },
        ],
    }
    return {**base, **(data or {})}


def get_source_data(
    data: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Return example NVD source API response data.

    Args:
        data: Optional overrides merged on top of the default fixture.

    Returns:
        A fresh dict with the fixture values, updated with ``data``.
    """
    base: Dict[str, Any] = {
        "name": "MITRE",
        "contact_email": "cve@mitre.org",
        "source_identifiers": [
            "cve@mitre.org",
            "8254265b-2729-46b6-b9e3-3dfca2d5bfca",
        ],
        "last_modified": "2019-09-09T16:18:45.930",
        "created": "2019-09-09T16:18:45.930",
        "v3_acceptance_level": {
            "description": "Contributor",
            "last_modified": "2025-01-30T00:00:20.107",
        },
        "cwe_acceptance_level": {
            "description": "Reference",
            "last_modified": "2025-01-24T00:00:00.043",
        },
    }
    return {**base, **(data or {})}
pontos-25.3.2/tests/nvd/cpe/000077500000000000000000000000001476255566300156505ustar00rootroot00000000000000pontos-25.3.2/tests/nvd/cpe/__init__.py000066400000000000000000000001411476255566300177550ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/nvd/cpe/test_api.py000066400000000000000000000336561476255566300200470ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long, arguments-differ, redefined-builtin
# ruff: noqa: E501

from datetime import datetime, timezone
from itertools import repeat
from typing import Any, Optional
from unittest.mock import MagicMock, patch
from uuid import UUID, uuid4

from httpx import AsyncClient, Response

from pontos.errors import PontosError
from pontos.nvd.api import now
from pontos.nvd.cpe.api import MAX_CPES_PER_PAGE, CPEApi
from tests import AsyncMock, IsolatedAsyncioTestCase, aiter, anext
from tests.nvd import get_cpe_data


def uuid_replace_str(uuid: UUID, iteration: int, number: int) -> str:
    """Build a deterministic UUID string derived from *uuid*.

    The last two dash-separated groups of *uuid* are replaced: the fourth
    group becomes *number* repeated four times and the final group becomes
    *iteration* repeated twelve times, yielding predictable ids for test
    fixtures.

    Args:
        uuid: Base UUID providing the unchanged leading groups.
        iteration: Digit repeated for the final 12-character group.
        number: Digit repeated for the 4-character group.

    Returns:
        The rewritten UUID as a string.
    """
    prefix, _, _ = str(uuid).rsplit("-", 2)
    # str * k is the idiomatic (and allocation-free-by-comparison)
    # replacement for "".join(str(j) for j in repeat(x, k)).
    return f"{prefix}-{str(number) * 4}-{str(iteration) * 12}"


def uuid_replace(uuid: UUID, iteration: int, number: int) -> UUID:
    """Like uuid_replace_str, but parse the result back into a UUID."""
    replaced = uuid_replace_str(uuid, iteration, number)
    return UUID(replaced)


def create_cpe_response(
    cpe_name_id: UUID,
    *,
    update: Optional[dict[str, Any]] = None,
    results: int = 1,
    iteration: int = 1,
) -> MagicMock:
    """Build a mocked httpx Response whose JSON payload lists CPE products.

    Args:
        cpe_name_id: Base UUID that every generated product id derives from.
        update: Optional extra keys merged into the JSON payload.
        results: Number of products in the page.
        iteration: Page number encoded into the generated ids.

    Returns:
        A MagicMock speccing Response, with ``json()`` returning the payload.
    """
    payload: dict[str, Any] = {
        "products": [
            {
                "cpe": get_cpe_data(
                    {
                        "cpe_name_id": uuid_replace_str(
                            cpe_name_id, iteration, i
                        )
                    }
                )
            }
            for i in range(1, results + 1)
        ],
        "results_per_page": results,
    }
    if update:
        payload.update(update)

    mocked = MagicMock(spec=Response)
    mocked.json.return_value = payload
    return mocked


def create_cpes_responses(
    cpe_name_id: UUID, responses: int = 2, results_per_response: int = 1
) -> list[MagicMock]:
    """Build several mocked CPE responses forming one paginated result set.

    Every page reports the same ``total_results`` so the client's paging
    logic keeps fetching until all pages are consumed.
    """
    total = responses * results_per_response
    pages: list[MagicMock] = []
    for iteration in range(1, responses + 1):
        pages.append(
            create_cpe_response(
                cpe_name_id=cpe_name_id,
                update={"total_results": total},
                results=results_per_response,
                iteration=iteration,
            )
        )
    return pages


class CPEApiTestCase(IsolatedAsyncioTestCase):
    """Unit tests for ``CPEApi`` against a fully mocked ``httpx.AsyncClient``.

    No network traffic happens: every HTTP interaction is pre-programmed on
    ``self.http_client`` and verified via the mock's awaited-call records.
    """

    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    def setUp(self, async_client: MagicMock, monotonic_mock: MagicMock) -> None:
        # Substitute an AsyncMock for the real HTTP client so each test can
        # queue responses and inspect the requests the API issues.
        self.http_client = AsyncMock()
        async_client.return_value = self.http_client
        monotonic_mock.return_value = 0
        self.api = CPEApi()

    async def test_no_cpe_name_id(self):
        # Requesting a CPE without an ID must fail immediately.
        with self.assertRaises(PontosError):
            await self.api.cpe(None)

    async def test_no_cpe(self):
        # An empty "products" list means the requested CPE does not exist;
        # the API must surface that as a PontosError.
        data = {
            "products": [],
            "results_per_page": 1,
        }
        response = MagicMock(spec=Response)
        response.json.return_value = data
        self.http_client.get.return_value = response

        with self.assertRaises(PontosError):
            await self.api.cpe("CPE-1")

    async def test_cpe(self):
        # A single-CPE lookup: verify the request parameters and that the
        # JSON payload is parsed into the expected model fields.
        uuid = uuid4()
        self.http_client.get.return_value = create_cpe_response(uuid)

        cpe = await self.api.cpe(uuid)

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={"cpeNameId": str(uuid)},
        )

        self.assertEqual(
            cpe.cpe_name,
            "cpe:2.3:o:microsoft:windows_10_22h2:-:*:*:*:*:*:arm64:*",
        )
        self.assertEqual(cpe.cpe_name_id, uuid_replace(uuid, 1, 1))
        self.assertFalse(cpe.deprecated)
        self.assertEqual(
            cpe.last_modified,
            datetime(2022, 12, 9, 18, 15, 16, 973000, tzinfo=timezone.utc),
        )
        self.assertEqual(
            cpe.created,
            datetime(2022, 12, 9, 16, 20, 6, 943000, tzinfo=timezone.utc),
        )

        self.assertEqual(cpe.refs, [])
        self.assertEqual(cpe.titles, [])
        self.assertEqual(cpe.deprecated_by, [])

    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.asyncio.sleep", autospec=True)
    async def test_rate_limit(
        self,
        sleep_mock: MagicMock,
        monotonic_mock: MagicMock,
    ):
        # NOTE(review): models the NVD rate limit for token-less clients —
        # the first five iterations proceed unthrottled, the sixth triggers
        # a 20 s sleep. Confirm the window values against pontos.nvd.api.
        uuid = uuid4()
        self.http_client.get.side_effect = create_cpes_responses(uuid, 8)
        monotonic_mock.side_effect = [10, 11]

        it = aiter(self.api.cpes())
        await anext(it)
        await anext(it)
        await anext(it)
        await anext(it)
        await anext(it)

        sleep_mock.assert_not_called()

        await anext(it)

        sleep_mock.assert_called_once_with(20.0)

    @patch("pontos.nvd.cpe.api.now", spec=now)
    async def test_cves_last_modified_start_date(self, now_mock: MagicMock):
        # NOTE(review): misnamed — this exercises cpes(), not CVEs (the
        # local variable is also named `cve`); consider renaming to
        # test_cpes_last_modified_start_date.
        # A given start date plus a mocked "now" must be passed through as
        # lastModStartDate/lastModEndDate on every page request.
        uuid = uuid4()
        now_mock.return_value = datetime(2022, 12, 31)
        self.http_client.get.side_effect = create_cpes_responses(uuid)

        it = aiter(
            self.api.cpes(last_modified_start_date=datetime(2022, 12, 1))
        )
        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 0,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": MAX_CPES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 1,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve = await anext(it)

    async def test_cves_last_modified_end_date(self):
        # NOTE(review): misnamed — exercises cpes(), not CVEs; consider
        # renaming to test_cpes_last_modified_end_date.
        # An explicit end date must be used verbatim instead of "now".
        uuid = uuid4()
        self.http_client.get.side_effect = create_cpes_responses(uuid)

        it = aiter(
            self.api.cpes(
                last_modified_start_date=datetime(2022, 12, 1),
                last_modified_end_date=datetime(2022, 12, 31),
            )
        )
        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 0,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": MAX_CPES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 1,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve = await anext(it)

    async def test_cpes_keywords(self):
        # Multiple keywords are joined with spaces and request exact
        # matching (keywordExactMatch present with empty value).
        uuid = uuid4()
        self.http_client.get.side_effect = create_cpes_responses(uuid)

        it = aiter(self.api.cpes(keywords=["Mac OS X", "kernel"]))
        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 0,
                "keywordSearch": "Mac OS X kernel",
                "keywordExactMatch": "",
                "resultsPerPage": MAX_CPES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "keywordSearch": "Mac OS X kernel",
                "keywordExactMatch": "",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve = await anext(it)

    async def test_cpes_keyword(self):
        # A single keyword string (no exact-match flag) is passed through.
        uuid = uuid4()
        self.http_client.get.side_effect = create_cpes_responses(uuid)

        it = aiter(self.api.cpes(keywords="macOS"))
        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 0,
                "keywordSearch": "macOS",
                "resultsPerPage": MAX_CPES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "keywordSearch": "macOS",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve = await anext(it)

    async def test_cpes_cpe_match_string(self):
        # cpe_match_string must be forwarded as the cpeMatchString param.
        uuid = uuid4()
        self.http_client.get.side_effect = create_cpes_responses(uuid)

        it = aiter(
            self.api.cpes(
                cpe_match_string="cpe:2.3:o:microsoft:windows_10:20h2:*:*:*:*:*:*:*"
            )
        )
        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 0,
                "resultsPerPage": MAX_CPES_PER_PAGE,
                "cpeMatchString": "cpe:2.3:o:microsoft:windows_10:20h2:*:*:*:*:*:*:*",
            },
        )

        self.http_client.get.reset_mock()

        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "cpeMatchString": "cpe:2.3:o:microsoft:windows_10:20h2:*:*:*:*:*:*:*",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve = await anext(it)

    async def test_cpes_match_criteria_id(self):
        # match_criteria_id must be forwarded as the matchCriteriaId param.
        uuid = uuid4()
        self.http_client.get.side_effect = create_cpes_responses(uuid)

        it = aiter(
            self.api.cpes(
                match_criteria_id="36FBCF0F-8CEE-474C-8A04-5075AF53FAF4"
            )
        )
        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 0,
                "matchCriteriaId": "36FBCF0F-8CEE-474C-8A04-5075AF53FAF4",
                "resultsPerPage": MAX_CPES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve = await anext(it)

        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 1))
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "matchCriteriaId": "36FBCF0F-8CEE-474C-8A04-5075AF53FAF4",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve = await anext(it)

    async def test_cpes_request_results(self):
        # Pagination: the first page asks for 10 results, the second item
        # of that page is served from the buffered response without a new
        # request, then the next page starts at index 2.
        uuid = uuid4()
        self.http_client.get.side_effect = create_cpes_responses(
            uuid, results_per_response=2
        )

        it = aiter(self.api.cpes(request_results=10))
        cve = await anext(it)

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 0,
                "resultsPerPage": 10,
            },
        )
        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 1))

        self.http_client.get.reset_mock()
        cve = await anext(it)
        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 1, 2))
        self.http_client.get.assert_not_called()

        self.http_client.get.reset_mock()
        cve = await anext(it)
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpes/2.0",
            headers={},
            params={
                "startIndex": 2,
                "resultsPerPage": 2,
            },
        )
        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 1))

        self.http_client.get.reset_mock()
        cve = await anext(it)
        self.assertEqual(cve.cpe_name_id, uuid_replace(uuid, 2, 2))
        self.http_client.get.assert_not_called()

        self.http_client.get.reset_mock()

        with self.assertRaises(Exception):
            cve = await anext(it)

    async def test_context_manager(self):
        # The API must delegate async context management to its HTTP client.
        async with self.api:
            pass

        self.http_client.__aenter__.assert_awaited_once()
        self.http_client.__aexit__.assert_awaited_once()
pontos-25.3.2/tests/nvd/cpe_match/000077500000000000000000000000001476255566300170245ustar00rootroot00000000000000pontos-25.3.2/tests/nvd/cpe_match/__init__.py000066400000000000000000000001341476255566300211330ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/nvd/cpe_match/test_api.py000066400000000000000000000433431476255566300212150ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long, arguments-differ, redefined-builtin
# ruff: noqa: E501

from datetime import datetime
from typing import Any, Optional
from unittest.mock import MagicMock, patch
from uuid import UUID, uuid4

from httpx import AsyncClient, Response

from pontos.errors import PontosError
from pontos.nvd.api import now
from pontos.nvd.cpe_match.api import MAX_CPE_MATCHES_PER_PAGE, CPEMatchApi
from tests import AsyncMock, IsolatedAsyncioTestCase, aiter, anext
from tests.nvd import get_cpe_match_data


def uuid_replace_str(uuid: UUID, iteration: int, number: int) -> str:
    """Return *uuid* as a string with the last two groups replaced by the
    zero-padded *iteration* (4 digits) and *number* (12 digits)."""
    prefix = str(uuid).rsplit("-", 2)[0]
    return f"{prefix}-{iteration:04}-{number:012}"


def uuid_replace(uuid: UUID, iteration: int, number: int) -> UUID:
    """Like :func:`uuid_replace_str` but returning a :class:`UUID` object."""
    replaced = uuid_replace_str(uuid, iteration, number)
    return UUID(replaced)


def generate_cpe_name(iteration: int, number: int) -> str:
    """Build a deterministic CPE 2.3 name for the given iteration/number."""
    version = f"1.{iteration - 1}.{number - 1}"
    return f"cpe:2.3:a:acme:test-app:{version}:*:*:*:*:*:*:*"


def create_cpe_match_response(
    match_criteria_id: UUID,
    cpe_name_id: UUID,
    *,
    update: Optional[dict[str, Any]] = None,
    results: int = 1,
    iteration: int = 1,
) -> MagicMock:
    """Create a mocked HTTP response carrying *results* CPE match strings.

    Each match string gets deterministic IDs derived from
    *match_criteria_id* / *cpe_name_id* and one CPE match whose name equals
    its criteria. Extra payload keys can be merged in through *update*.
    """
    match_strings = []
    for number in range(1, results + 1):
        criteria = generate_cpe_name(iteration, number)
        match_string = get_cpe_match_data(
            {
                "match_criteria_id": uuid_replace_str(
                    match_criteria_id, iteration, number
                ),
                "criteria": criteria,
                "matches": [
                    {
                        "cpe_name": criteria,
                        "cpe_name_id": uuid_replace_str(
                            cpe_name_id, iteration, number
                        ),
                    }
                ],
            }
        )
        match_strings.append({"match_string": match_string})

    payload = {
        "match_strings": match_strings,
        "results_per_page": results,
    }
    if update:
        payload.update(update)

    mock_response = MagicMock(spec=Response)
    mock_response.json.return_value = payload
    return mock_response


def create_cpe_match_responses(
    match_criteria_id: UUID,
    cpe_name_id: UUID,
    responses: int = 2,
    results_per_response: int = 1,
) -> list[MagicMock]:
    """Create mocked responses for a paginated sequence of CPE match
    requests, each reporting the combined total_results."""
    total = responses * results_per_response
    mocks = []
    for iteration in range(1, responses + 1):
        mocks.append(
            create_cpe_match_response(
                match_criteria_id=match_criteria_id,
                cpe_name_id=cpe_name_id,
                update={"total_results": total},
                results=results_per_response,
                iteration=iteration,
            )
        )
    return mocks


class CPEMatchApiTestCase(IsolatedAsyncioTestCase):
    """Unit tests for ``CPEMatchApi`` (no API token) against a fully mocked
    ``httpx.AsyncClient``; requests and pagination are verified via the
    mock's awaited-call records."""

    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    def setUp(self, async_client: MagicMock, monotonic_mock: MagicMock) -> None:
        # Substitute an AsyncMock for the real HTTP client so each test can
        # queue responses and inspect the requests the API issues.
        self.http_client = AsyncMock()
        async_client.return_value = self.http_client
        monotonic_mock.return_value = 0
        self.api = CPEMatchApi()

    async def test_no_match_criteria_id(self):
        # Requesting a match string without an ID must fail immediately.
        with self.assertRaises(PontosError):
            await self.api.cpe_match(None)

    async def test_no_match_strings(self):
        # An empty "match_strings" list means the requested criteria ID is
        # unknown; the API must surface that as a PontosError.
        data = {
            "match_strings": [],
            "results_per_page": 1,
        }
        response = MagicMock(spec=Response)
        response.json.return_value = data
        self.http_client.get.return_value = response

        with self.assertRaises(PontosError):
            await self.api.cpe_match("DOES-NOT-EXIST")

    async def test_cpe_match(self):
        # A single match-string lookup: verify the request parameters and
        # that criteria, matches and IDs are parsed from the payload.
        match_criteria_id = uuid_replace(uuid4(), 1, 1)
        cpe_name_id = uuid_replace(uuid4(), 1, 1)

        self.http_client.get.return_value = create_cpe_match_response(
            match_criteria_id, cpe_name_id
        )

        cpe_match_string = await self.api.cpe_match(match_criteria_id)

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={"matchCriteriaId": str(match_criteria_id)},
        )

        self.assertEqual(
            match_criteria_id,
            cpe_match_string.match_criteria_id,
        )
        self.assertEqual(
            generate_cpe_name(1, 1),
            cpe_match_string.criteria,
        )
        self.assertEqual(
            cpe_name_id,
            cpe_match_string.matches[0].cpe_name_id,
        )
        self.assertEqual(
            generate_cpe_name(1, 1),
            cpe_match_string.matches[0].cpe_name,
        )

    @patch("pontos.nvd.cpe_match.api.now", spec=now)
    async def test_cpe_matches_last_modified_start_date(
        self, now_mock: MagicMock
    ):
        # A start date plus mocked "now" must be forwarded as
        # lastModStartDate/lastModEndDate on every page request.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()

        now_mock.return_value = datetime(2019, 8, 30)
        self.http_client.get.side_effect = create_cpe_match_responses(
            match_criteria_id, cpe_name_id
        )

        it = aiter(
            self.api.cpe_matches(last_modified_start_date=datetime(2019, 6, 1))
        )
        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 0,
                "lastModStartDate": "2019-06-01T00:00:00",
                "lastModEndDate": "2019-08-30T00:00:00",
                "resultsPerPage": MAX_CPE_MATCHES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 1,
                "lastModStartDate": "2019-06-01T00:00:00",
                "lastModEndDate": "2019-08-30T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cpe_match = await anext(it)

    @patch("pontos.nvd.cpe_match.api.now", spec=now)
    async def test_cpe_matches_last_modified_end_date(
        self, now_mock: MagicMock
    ):
        # An end date without a start date is forwarded alone; no
        # lastModStartDate parameter is added.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()

        now_mock.return_value = datetime(2019, 8, 30)
        self.http_client.get.side_effect = create_cpe_match_responses(
            match_criteria_id, cpe_name_id
        )

        it = aiter(
            self.api.cpe_matches(last_modified_end_date=datetime(2019, 8, 1))
        )
        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 0,
                "lastModEndDate": "2019-08-01T00:00:00",
                "resultsPerPage": MAX_CPE_MATCHES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 1,
                "lastModEndDate": "2019-08-01T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cpe_match = await anext(it)

    async def test_cpe_matches_cve_id(self):
        # cve_id must be forwarded as the cveId query parameter.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()

        self.http_client.get.side_effect = create_cpe_match_responses(
            match_criteria_id, cpe_name_id
        )

        it = aiter(self.api.cpe_matches(cve_id="CVE-2010-3574"))
        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 0,
                "cveId": "CVE-2010-3574",
                "resultsPerPage": MAX_CPE_MATCHES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 1,
                "cveId": "CVE-2010-3574",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cpe_match = await anext(it)

    async def test_cpe_matches_match_string_search(self):
        # match_string_search must be forwarded as matchStringSearch.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()

        self.http_client.get.side_effect = create_cpe_match_responses(
            match_criteria_id, cpe_name_id
        )

        it = aiter(
            self.api.cpe_matches(
                match_string_search="cpe:2.3:a:sun:jre:*:*:*:*:*:*:*:*"
            )
        )
        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 0,
                "matchStringSearch": "cpe:2.3:a:sun:jre:*:*:*:*:*:*:*:*",
                "resultsPerPage": MAX_CPE_MATCHES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 1,
                "matchStringSearch": "cpe:2.3:a:sun:jre:*:*:*:*:*:*:*:*",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cpe_match = await anext(it)

    async def test_cpe_matches_request_results(self):
        # Pagination: the first page asks for 10 results, the second item
        # of that page is served from the buffered response without a new
        # request, then the next page starts at index 2.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()

        self.http_client.get.side_effect = create_cpe_match_responses(
            match_criteria_id=match_criteria_id,
            cpe_name_id=cpe_name_id,
            results_per_response=2,
        )

        it = aiter(self.api.cpe_matches(request_results=10))
        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 0,
                "resultsPerPage": 10,
            },
        )

        self.http_client.get.reset_mock()
        cpe_match = await anext(it)
        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 2), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_not_called()

        self.http_client.get.reset_mock()

        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={},
            params={
                "startIndex": 2,
                "resultsPerPage": 2,
            },
        )

        self.http_client.get.reset_mock()
        cpe_match = await anext(it)
        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 2), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_not_called()

        with self.assertRaises(Exception):
            cpe_match = await anext(it)

    async def test_cpe_match_caching(self):
        # NOTE(review): relies on CPEMatchApi de-duplicating match objects —
        # matches with identical cpe_name AND cpe_name_id across responses
        # come back as the very same object instance; confirm against
        # pontos.nvd.cpe_match.api.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()

        responses = create_cpe_match_responses(
            match_criteria_id=match_criteria_id,
            cpe_name_id=cpe_name_id,
            results_per_response=3,
        )
        self.http_client.get.side_effect = responses
        response_matches = [
            [
                match_string["match_string"]["matches"]
                for match_string in response.json.return_value["match_strings"]
            ]
            for response in responses
        ]

        # Make matches of first match_string identical in each response
        response_matches[1][0][0]["cpe_name"] = response_matches[0][0][0][
            "cpe_name"
        ]
        response_matches[1][0][0]["cpe_name_id"] = response_matches[0][0][0][
            "cpe_name_id"
        ]
        # Make matches of second match_string only have the same cpe_name_id
        response_matches[1][1][0]["cpe_name_id"] = response_matches[0][1][0][
            "cpe_name_id"
        ]
        # Leave matches of third match_string different from each other

        it = aiter(self.api.cpe_matches(request_results=10))
        received = [item async for item in it]

        # First matches in each response of three items must be identical objects
        self.assertIs(received[0].matches[0], received[3].matches[0])

        # Second matches in each response of three items must only have same cpe_name_id
        self.assertIsNot(received[1].matches[0], received[4].matches[0])
        self.assertEqual(
            received[1].matches[0].cpe_name_id,
            received[4].matches[0].cpe_name_id,
        )
        self.assertNotEqual(
            received[1].matches[0].cpe_name, received[4].matches[0].cpe_name
        )

        # Third matches in each response of three items must be different
        self.assertIsNot(received[2].matches[0], received[5].matches[0])
        self.assertNotEqual(
            received[2].matches[0].cpe_name_id,
            received[5].matches[0].cpe_name_id,
        )
        self.assertNotEqual(
            received[2].matches[0].cpe_name, received[5].matches[0].cpe_name
        )

    async def test_context_manager(self):
        # The API must delegate async context management to its HTTP client.
        async with self.api:
            pass

        self.http_client.__aenter__.assert_awaited_once()
        self.http_client.__aexit__.assert_awaited_once()


class CPEMatchApiWithTokenTestCase(IsolatedAsyncioTestCase):
    """Tests for ``CPEMatchApi`` configured with an NVD API token: the token
    must be sent as the ``apiKey`` header and raises the rate limit."""

    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    def setUp(self, async_client: MagicMock, monotonic_mock: MagicMock) -> None:
        # Same mocked HTTP client setup as above, but the API instance
        # carries an authentication token.
        self.http_client = AsyncMock()
        async_client.return_value = self.http_client
        monotonic_mock.return_value = 0
        self.api = CPEMatchApi(token="token123")

    async def test_cpe_matches_request_results_with_token(self):
        # Pagination as in the token-less case, but every request must
        # include the apiKey header.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()

        self.http_client.get.side_effect = create_cpe_match_responses(
            match_criteria_id=match_criteria_id,
            cpe_name_id=cpe_name_id,
            results_per_response=2,
        )

        it = aiter(self.api.cpe_matches(request_results=10))
        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={"apiKey": "token123"},
            params={
                "startIndex": 0,
                "resultsPerPage": 10,
            },
        )

        self.http_client.get.reset_mock()
        cpe_match = await anext(it)
        self.assertEqual(
            uuid_replace(match_criteria_id, 1, 2), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_not_called()

        self.http_client.get.reset_mock()

        cpe_match = await anext(it)

        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 1), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cpematch/2.0",
            headers={"apiKey": "token123"},
            params={
                "startIndex": 2,
                "resultsPerPage": 2,
            },
        )

        self.http_client.get.reset_mock()
        cpe_match = await anext(it)
        self.assertEqual(
            uuid_replace(match_criteria_id, 2, 2), cpe_match.match_criteria_id
        )
        self.http_client.get.assert_not_called()

        with self.assertRaises(Exception):
            cpe_match = await anext(it)

    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.asyncio.sleep", autospec=True)
    async def test_rate_limit_with_token(
        self,
        sleep_mock: MagicMock,
        monotonic_mock: MagicMock,
    ):
        # NOTE(review): with an API token the request allowance is higher,
        # so unlike the token-less test no throttling sleep is expected
        # after the sixth iteration; confirm against pontos.nvd.api.
        match_criteria_id = uuid4()
        cpe_name_id = uuid4()
        self.http_client.get.side_effect = create_cpe_match_responses(
            match_criteria_id, cpe_name_id, 8
        )
        monotonic_mock.side_effect = [10, 11]

        it = aiter(self.api.cpe_matches())
        await anext(it)
        await anext(it)
        await anext(it)
        await anext(it)
        await anext(it)

        sleep_mock.assert_not_called()

        await anext(it)

        sleep_mock.assert_not_called()
pontos-25.3.2/tests/nvd/cve/000077500000000000000000000000001476255566300156565ustar00rootroot00000000000000pontos-25.3.2/tests/nvd/cve/__init__.py000066400000000000000000000001411476255566300177630ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/nvd/cve/test_api.py000066400000000000000000000715761476255566300200600ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long, arguments-differ, redefined-builtin

from datetime import datetime, timezone
from typing import Any, Dict, List, Optional
from unittest.mock import MagicMock, patch

from httpx import AsyncClient, Response

from pontos.errors import PontosError
from pontos.nvd.api import now
from pontos.nvd.cve.api import MAX_CVES_PER_PAGE, CVEApi
from pontos.nvd.models import cvss_v2, cvss_v3
from tests import AsyncMock, IsolatedAsyncioTestCase, aiter, anext
from tests.nvd import get_cve_data


def create_cve_response(
    cve_id: str,
    *,
    update: Optional[Dict[str, Any]] = None,
    results: int = 1,
) -> MagicMock:
    """Build a mocked HTTP response carrying *results* CVE entries.

    The CVE ids are suffixed ``-1`` … ``-results``; *update* entries are
    merged into the JSON payload (e.g. to set ``total_results``).
    """
    vulnerabilities = [
        {"cve": get_cve_data({"id": f"{cve_id}-{index}"})}
        for index in range(1, results + 1)
    ]
    payload: Dict[str, Any] = {
        "vulnerabilities": vulnerabilities,
        "results_per_page": results,
    }
    payload.update(update or {})

    mocked = MagicMock(spec=Response)
    mocked.json.return_value = payload
    return mocked


def create_cves_responses(
    requests: int = 2, results_per_response: int = 1
) -> List[MagicMock]:
    """Create one mocked response per expected request/page."""
    total = requests * results_per_response
    responses = []
    for number in range(1, requests + 1):
        responses.append(
            create_cve_response(
                f"CVE-{number}",
                update={"total_results": total},
                results=results_per_response,
            )
        )
    return responses


class CVEApiTestCase(IsolatedAsyncioTestCase):
    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    def setUp(
        self,
        async_client: MagicMock,
        monotonic_mock: MagicMock,
    ) -> None:
        """Create a CVEApi wired to a mocked HTTP client.

        ``AsyncClient`` is patched so no real network traffic happens;
        ``time.monotonic`` is pinned to 0 so rate-limit bookkeeping starts
        from a known point.
        """
        self.http_client = AsyncMock()
        async_client.return_value = self.http_client
        monotonic_mock.return_value = 0
        self.api = CVEApi(token="token")

    async def test_no_cve_id(self):
        """Requesting a CVE without an id must raise a PontosError."""
        with self.assertRaises(PontosError):
            await self.api.cve(None)

    async def test_no_cve(self):
        """An empty vulnerabilities list results in a PontosError."""
        response = MagicMock(spec=Response)
        response.json.return_value = {
            "vulnerabilities": [],
            "results_per_page": 1,
        }
        self.http_client.get.return_value = response

        with self.assertRaises(PontosError):
            await self.api.cve("CVE-1")

    async def test_cve(self):
        """A single CVE lookup parses all fields of the returned model."""
        response = MagicMock(spec=Response)
        response.json.return_value = {
            "vulnerabilities": [{"cve": get_cve_data()}]
        }
        self.http_client.get.return_value = response

        result = await self.api.cve("FOO-BAR")

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={"cveId": "FOO-BAR"},
        )

        self.assertEqual(result.id, "CVE-2022-45536")
        self.assertEqual(result.source_identifier, "cve@mitre.org")
        self.assertEqual(
            result.published,
            datetime(2022, 11, 22, 21, 15, 11, 103000, tzinfo=timezone.utc),
        )
        self.assertEqual(
            result.last_modified,
            datetime(2022, 11, 23, 16, 2, 7, 367000, tzinfo=timezone.utc),
        )
        self.assertEqual(len(result.descriptions), 1)
        self.assertEqual(len(result.references), 2)
        self.assertEqual(len(result.weaknesses), 0)
        self.assertEqual(len(result.configurations), 0)
        self.assertEqual(len(result.vendor_comments), 0)
        # All optional fields are absent from the fixture data.
        for attribute in (
            result.metrics,
            result.evaluator_comment,
            result.evaluator_solution,
            result.evaluator_impact,
            result.cisa_exploit_add,
            result.cisa_action_due,
            result.cisa_required_action,
            result.cisa_vulnerability_name,
        ):
            self.assertIsNone(attribute)

    async def test_cves(self):
        """Iterating all CVEs pages through results until exhaustion."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves())

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={"startIndex": 1, "resultsPerPage": 1},
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    @patch("pontos.nvd.cve.api.now", spec=now)
    async def test_cves_last_modified_start_date(self, now_mock: MagicMock):
        """A start date without an end date defaults the end date to now."""
        now_mock.return_value = datetime(2022, 12, 31)
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(
            self.api.cves(last_modified_start_date=datetime(2022, 12, 1))
        )

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_last_modified_end_date(self):
        """Explicit start and end dates are forwarded as lastMod params."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(
            self.api.cves(
                last_modified_start_date=datetime(2022, 12, 1),
                last_modified_end_date=datetime(2022, 12, 31),
            )
        )

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "lastModStartDate": "2022-12-01T00:00:00",
                "lastModEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    @patch("pontos.nvd.cve.api.now", spec=now)
    async def test_cves_published_start_date(self, now_mock: MagicMock):
        """A published start date defaults the end date to now."""
        now_mock.return_value = datetime(2022, 12, 31)
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(
            self.api.cves(published_start_date=datetime(2022, 12, 1))
        )

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "pubStartDate": "2022-12-01T00:00:00",
                "pubEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "pubStartDate": "2022-12-01T00:00:00",
                "pubEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_published_end_date(self):
        """Explicit published start/end dates are forwarded as pub params."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(
            self.api.cves(
                published_start_date=datetime(2022, 12, 1),
                published_end_date=datetime(2022, 12, 31),
            )
        )

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "pubStartDate": "2022-12-01T00:00:00",
                "pubEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "pubStartDate": "2022-12-01T00:00:00",
                "pubEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_cpe_name(self):
        """A CPE name is forwarded as the ``cpeName`` query parameter."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(cpe_name="foo-bar"))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cpeName": "foo-bar",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "cpeName": "foo-bar",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_is_vulnerable(self):
        """``is_vulnerable`` adds the flag-style ``isVulnerable`` param."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(cpe_name="foo-bar", is_vulnerable=True))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cpeName": "foo-bar",
                "isVulnerable": "",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "cpeName": "foo-bar",
                "isVulnerable": "",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_cvss_v2_vector(self):
        """A CVSSv2 vector is forwarded as ``cvssV2Metrics``."""
        vector = "AV:N/AC:M/Au:N/C:N/I:P/A:N"
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(cvss_v2_vector=vector))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cvssV2Metrics": vector,
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "cvssV2Metrics": vector,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_cvss_v3_vector(self):
        """A CVSSv3 vector is forwarded as ``cvssV3Metrics``."""
        vector = "CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:H/I:N/A:N"
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(cvss_v3_vector=vector))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cvssV3Metrics": vector,
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "cvssV3Metrics": vector,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_cvss_v2_severity(self):
        """A CVSSv2 severity enum is serialized into ``cvssV2Severity``."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(cvss_v2_severity=cvss_v2.Severity.HIGH))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cvssV2Severity": "HIGH",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "cvssV2Severity": "HIGH",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_cvss_v3_severity(self):
        """A CVSSv3 severity enum is serialized into ``cvssV3Severity``."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(cvss_v3_severity=cvss_v3.Severity.HIGH))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cvssV3Severity": "HIGH",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "cvssV3Severity": "HIGH",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_keywords(self):
        """A keyword list is joined and requests an exact match."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(keywords=["Mac OS X", "kernel"]))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "keywordSearch": "Mac OS X kernel",
                "keywordExactMatch": "",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "keywordSearch": "Mac OS X kernel",
                "keywordExactMatch": "",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_keyword(self):
        """A single keyword string maps to ``keywordSearch`` only."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(keywords="Windows"))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "keywordSearch": "Windows",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "keywordSearch": "Windows",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_cwe(self):
        """A CWE id is forwarded as the ``cweId`` query parameter."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(cwe_id="CWE-1"))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cweId": "CWE-1",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={"startIndex": 1, "resultsPerPage": 1, "cweId": "CWE-1"},
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_source_identifier(self):
        """A source identifier maps to the ``sourceIdentifier`` param."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(source_identifier="nvd@nist.gov"))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "sourceIdentifier": "nvd@nist.gov",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "sourceIdentifier": "nvd@nist.gov",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_virtual_match_string(self):
        """A virtual match string maps to ``virtualMatchString``."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(virtual_match_string="foo-bar"))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "virtualMatchString": "foo-bar",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "virtualMatchString": "foo-bar",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_has_cert_alerts(self):
        """``has_cert_alerts`` adds the flag-style ``hasCertAlerts`` param."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(has_cert_alerts=True))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "hasCertAlerts": "",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "hasCertAlerts": "",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_has_cert_notes(self):
        """``has_cert_notes`` adds the flag-style ``hasCertNotes`` param."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(has_cert_notes=True))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "hasCertNotes": "",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "hasCertNotes": "",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_has_kev(self):
        """``has_kev`` adds the flag-style ``hasKev`` param."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(has_kev=True))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "hasKev": "",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "hasKev": "",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_has_oval(self):
        """``has_oval`` adds the flag-style ``hasOval`` param."""
        get_mock = self.http_client.get
        get_mock.side_effect = create_cves_responses()

        iterator = aiter(self.api.cves(has_oval=True))

        first = await anext(iterator)
        self.assertEqual(first.id, "CVE-1-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "hasOval": "",
                "resultsPerPage": MAX_CVES_PER_PAGE,
            },
        )
        get_mock.reset_mock()

        second = await anext(iterator)
        self.assertEqual(second.id, "CVE-2-1")
        get_mock.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "hasOval": "",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(iterator)

    async def test_cves_request_results(self):
        """``request_results`` controls the page size; items within one
        response are yielded without an additional request.

        Fix: assert the specific ``StopAsyncIteration`` on exhaustion
        instead of the overly broad ``Exception`` used before, matching
        the other iterator tests in this class.
        """
        self.http_client.get.side_effect = create_cves_responses(
            results_per_response=2
        )

        it = aiter(self.api.cves(request_results=10))
        cve = await anext(it)

        self.assertEqual(cve.id, "CVE-1-1")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "resultsPerPage": 10,
            },
        )

        # Second item is served from the same page: no new request.
        self.http_client.get.reset_mock()
        cve = await anext(it)
        self.assertEqual(cve.id, "CVE-1-2")
        self.http_client.get.assert_not_called()

        self.http_client.get.reset_mock()

        cve = await anext(it)

        self.assertEqual(cve.id, "CVE-2-1")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cves/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 2,
                "resultsPerPage": 2,
            },
        )

        self.http_client.get.reset_mock()
        cve = await anext(it)
        self.assertEqual(cve.id, "CVE-2-2")
        self.http_client.get.assert_not_called()

        # Iterator is exhausted after total_results (4) items.
        with self.assertRaises(StopAsyncIteration):
            cve = await anext(it)

    async def test_context_manager(self):
        """Entering/exiting the API delegates to the underlying client."""
        async with self.api:
            pass

        self.http_client.__aenter__.assert_awaited_once()
        self.http_client.__aexit__.assert_awaited_once()

    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.asyncio.sleep", autospec=True)
    async def test_rate_limit(
        self,
        sleep_mock: MagicMock,
        monotonic_mock: MagicMock,
    ):
        """The client sleeps once the configured request budget is spent."""
        self.http_client.get.side_effect = create_cves_responses(6)
        self.api._rate_limit = 5  # pylint: disable=protected-access
        monotonic_mock.side_effect = [10.0, 11.0]

        iterator = aiter(self.api.cves())
        # The first five requests fit into the rate limit window.
        for _ in range(5):
            await anext(iterator)

        sleep_mock.assert_not_called()

        await anext(iterator)

        sleep_mock.assert_called_once_with(20.0)
pontos-25.3.2/tests/nvd/cve_changes/000077500000000000000000000000001476255566300173465ustar00rootroot00000000000000pontos-25.3.2/tests/nvd/cve_changes/__init__.py000066400000000000000000000001321476255566300214530ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
pontos-25.3.2/tests/nvd/cve_changes/test_api.py000066400000000000000000000254401476255566300215350ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later


from datetime import datetime, timezone
from typing import Any, Optional
from unittest.mock import MagicMock, patch
from uuid import UUID

from httpx import AsyncClient, Response

from pontos.errors import PontosError
from pontos.nvd.api import now
from pontos.nvd.cve_changes.api import MAX_CVE_CHANGES_PER_PAGE, CVEChangesApi
from pontos.nvd.models.cve_change import Detail, EventName
from tests import AsyncMock, IsolatedAsyncioTestCase, aiter, anext
from tests.nvd import get_cve_change_data


def create_cve_changes_response(
    cve_id: str, update: Optional[dict[str, Any]] = None
) -> MagicMock:
    """Build a mocked response containing a single CVE change entry."""
    payload: dict[str, Any] = {
        "cve_changes": [{"change": get_cve_change_data({"cve_id": cve_id})}],
        "results_per_page": 1,
    }
    payload.update(update or {})

    mocked = MagicMock(spec=Response)
    mocked.json.return_value = payload
    return mocked


def create_cve_changes_responses(count: int = 2) -> list[MagicMock]:
    """Create *count* mocked responses, one CVE change page each."""
    responses = []
    for number in range(1, count + 1):
        responses.append(
            create_cve_changes_response(
                f"CVE-{number}", {"total_results": count}
            )
        )
    return responses


class CVEChangesApiTestCase(IsolatedAsyncioTestCase):
    """Tests for the CVE change history API client (``CVEChangesApi``).

    All HTTP traffic is mocked: responses are injected through
    ``self.http_client.get.side_effect`` and every test verifies the URL
    and query parameters the client sends for each page.
    """

    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    def setUp(self, async_client: MagicMock) -> None:
        # Patch the AsyncClient class used internally by the API so that
        # no real HTTP requests are performed.
        self.http_client = AsyncMock()
        async_client.return_value = self.http_client
        self.api = CVEChangesApi(token="token")

    async def test_cve_changes(self):
        """Two pages of changes are fetched lazily; the parsed fields of
        the first change and the pagination parameters are verified."""
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(self.api.changes())
        cve_change = await anext(it)

        self.assertEqual(cve_change.cve_id, "CVE-1")
        self.assertEqual(cve_change.event_name, EventName.INITIAL_ANALYSIS)
        self.assertEqual(
            cve_change.cve_change_id,
            UUID("5160FDEB-0FF0-457B-AA36-0AEDCAB2522E"),
        )
        self.assertEqual(cve_change.source_identifier, "nvd@nist.gov")
        self.assertEqual(
            cve_change.created,
            datetime(2022, 3, 18, 20, 13, 8, 123000, tzinfo=timezone.utc),
        )
        self.assertEqual(
            cve_change.details,
            [
                Detail(
                    action="Added",
                    type="CVSS V2",
                    new_value="NIST (AV:L/AC:L/Au:N/C:P/I:N/A:N)",
                ),
                Detail(
                    action="Added",
                    type="CVSS V3.1",
                    new_value="NIST AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:N/A:N",
                ),
                Detail(
                    action="Changed",
                    type="Reference Type",
                    old_value="http://www.openwall.com/lists/oss-security/2022/03/18/2 No Types Assigned",
                    new_value="http://www.openwall.com/lists/oss-security/2022/03/18/2 Mailing List, Third Party Advisory",
                ),
            ],
        )

        # first request starts at index 0 with the maximum page size
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "resultsPerPage": MAX_CVE_CHANGES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        # second page: index advances by the one result already received
        cve_change = await anext(it)
        self.assertEqual(cve_change.cve_id, "CVE-2")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={"startIndex": 1, "resultsPerPage": 1},
        )

        with self.assertRaises(StopAsyncIteration):
            cve_change = await anext(it)

    async def test_cve_changes_change_dates(self):
        """Explicit change start/end dates are forwarded as ISO strings
        in every page request."""
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(
            self.api.changes(
                change_start_date=datetime(2022, 12, 1),
                change_end_date=datetime(2022, 12, 31),
            )
        )
        cve_changes = await anext(it)

        self.assertEqual(cve_changes.cve_id, "CVE-1")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "changeStartDate": "2022-12-01T00:00:00",
                "changeEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": MAX_CVE_CHANGES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve_changes = await anext(it)

        self.assertEqual(cve_changes.cve_id, "CVE-2")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "changeStartDate": "2022-12-01T00:00:00",
                "changeEndDate": "2022-12-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve_changes = await anext(it)

    async def test_cve_changes_cve_id(self):
        """A cve_id filter is passed through as the ``cveId`` parameter
        on every page request."""
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(self.api.changes(cve_id="CVE-1"))
        cve_changes = await anext(it)

        self.assertEqual(cve_changes.cve_id, "CVE-1")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "cveId": "CVE-1",
                "resultsPerPage": MAX_CVE_CHANGES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve_changes = await anext(it)

        self.assertEqual(cve_changes.cve_id, "CVE-2")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "cveId": "CVE-1",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            cve_changes = await anext(it)

    async def test_cve_changes_event_name(self):
        """An event_name filter is serialized to its string value in the
        ``eventName`` parameter."""
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(self.api.changes(event_name=EventName.INITIAL_ANALYSIS))
        cve_changes = await anext(it)

        self.assertEqual(cve_changes.cve_id, "CVE-1")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "eventName": "Initial Analysis",
                "resultsPerPage": MAX_CVE_CHANGES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        cve_changes = await anext(it)

        self.assertEqual(cve_changes.cve_id, "CVE-2")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "eventName": "Initial Analysis",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(it)

    @patch("pontos.nvd.cve_changes.api.now", spec=now)
    async def test_cve_changes_calculate_end_date(self, now_mock: MagicMock):
        """When only a start date is given, the end date defaults to the
        (mocked) current time."""
        now_mock.return_value = datetime(2023, 1, 2, tzinfo=timezone.utc)
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(
            self.api.changes(
                change_start_date=datetime(2023, 1, 1, tzinfo=timezone.utc)
            )
        )

        await anext(it)

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "changeStartDate": "2023-01-01T00:00:00+00:00",
                "changeEndDate": "2023-01-02T00:00:00+00:00",
                "resultsPerPage": MAX_CVE_CHANGES_PER_PAGE,
            },
        )

    @patch("pontos.nvd.cve_changes.api.now", spec=now)
    async def test_cve_changes_calculate_end_date_with_limit(
        self, now_mock: MagicMock
    ):
        """The derived end date is capped at 120 days after the start
        date, not at the current time."""
        now_mock.return_value = datetime(2023, 5, 2, tzinfo=timezone.utc)
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(
            self.api.changes(
                change_start_date=datetime(2023, 1, 1, tzinfo=timezone.utc)
            )
        )

        await anext(it)

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "changeStartDate": "2023-01-01T00:00:00+00:00",
                "changeEndDate": "2023-05-01T00:00:00+00:00",
                "resultsPerPage": MAX_CVE_CHANGES_PER_PAGE,
            },
        )

    async def test_cve_changes_calculate_start_date(self):
        """When only an end date is given, the start date is derived
        backwards from it (120-day window)."""
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(
            self.api.changes(
                change_end_date=datetime(2023, 5, 1, tzinfo=timezone.utc)
            )
        )

        await anext(it)

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "changeStartDate": "2023-01-01T00:00:00+00:00",
                "changeEndDate": "2023-05-01T00:00:00+00:00",
                "resultsPerPage": MAX_CVE_CHANGES_PER_PAGE,
            },
        )

    async def test_cve_changes_range_too_long(self):
        """A date range wider than the allowed window raises PontosError
        immediately, before any request is made."""
        with self.assertRaises(PontosError):
            self.api.changes(
                change_start_date=datetime(2023, 1, 1),
                change_end_date=datetime(2023, 5, 2),
            )

    async def test_cve_changes_request_results(self):
        """An explicit request_results overrides the default page size."""
        self.http_client.get.side_effect = create_cve_changes_responses()

        it = aiter(self.api.changes(request_results=10))

        await anext(it)

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/cvehistory/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "resultsPerPage": 10,
            },
        )

    async def test_context_manager(self):
        """Entering/exiting the API delegates to the HTTP client's async
        context manager."""
        async with self.api:
            pass

        self.http_client.__aenter__.assert_awaited_once()
        self.http_client.__aexit__.assert_awaited_once()
pontos-25.3.2/tests/nvd/models/000077500000000000000000000000001476255566300163645ustar00rootroot00000000000000pontos-25.3.2/tests/nvd/models/__init__.py000066400000000000000000000001411476255566300204710ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/nvd/models/test_cpe.py000066400000000000000000000063221476255566300205470ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# pylint: disable=line-too-long
# ruff: noqa: E501

import unittest
from datetime import datetime, timezone
from uuid import UUID

from pontos.nvd.models.cpe import CPE, ReferenceType
from tests.nvd import get_cpe_data


class CPETestCase(unittest.TestCase):
    """Tests for parsing ``CPE`` model instances from API dictionaries."""

    def test_required_only(self):
        """A minimal record fills the mandatory fields and leaves the
        optional list attributes empty."""
        parsed = CPE.from_dict(get_cpe_data())

        self.assertEqual(
            "cpe:2.3:o:microsoft:windows_10_22h2:-:*:*:*:*:*:arm64:*",
            parsed.cpe_name,
        )
        self.assertEqual(
            UUID("9BAECDB2-614D-4E9C-9936-190C30246F03"), parsed.cpe_name_id
        )
        self.assertFalse(parsed.deprecated)
        self.assertEqual(
            datetime(2022, 12, 9, 18, 15, 16, 973000, tzinfo=timezone.utc),
            parsed.last_modified,
        )
        self.assertEqual(
            datetime(2022, 12, 9, 16, 20, 6, 943000, tzinfo=timezone.utc),
            parsed.created,
        )

        self.assertEqual([], parsed.titles)
        self.assertEqual([], parsed.refs)
        self.assertEqual([], parsed.deprecated_by)

    def test_titles(self):
        """A title entry keeps its text and language code."""
        parsed = CPE.from_dict(
            get_cpe_data(
                {
                    "titles": [
                        {
                            "title": "Microsoft Windows 10 22h2 on ARM64",
                            "lang": "en",
                        }
                    ],
                }
            )
        )

        self.assertEqual(1, len(parsed.titles))

        entry = parsed.titles[0]
        self.assertEqual("Microsoft Windows 10 22h2 on ARM64", entry.title)
        self.assertEqual("en", entry.lang)

    def test_refs(self):
        """A reference entry keeps its URL and its type enum value."""
        parsed = CPE.from_dict(
            get_cpe_data(
                {
                    "refs": [
                        {
                            "ref": "https://learn.microsoft.com/en-us/windows/release-health/release-information",
                            "type": "Version",
                        }
                    ],
                }
            )
        )

        self.assertEqual(1, len(parsed.refs))

        entry = parsed.refs[0]
        self.assertEqual(
            "https://learn.microsoft.com/en-us/windows/release-health/release-information",
            entry.ref,
        )
        self.assertEqual(ReferenceType.VERSION, entry.type)

    def test_deprecated(self):
        """A deprecated CPE lists the replacing CPE names and ids."""
        parsed = CPE.from_dict(
            get_cpe_data(
                {
                    "deprecated": True,
                    "deprecated_by": [
                        {
                            "cpe_name": "cpe:2.3:o:microsoft:windows_10_22h2:-:*:*:*:*:*:x64:*",
                            "cpe_name_id": "A09335E2-B42F-4820-B487-57A4BF0CEE98",
                        }
                    ],
                }
            )
        )

        self.assertTrue(parsed.deprecated)
        self.assertEqual(1, len(parsed.deprecated_by))

        successor = parsed.deprecated_by[0]
        self.assertEqual(
            "cpe:2.3:o:microsoft:windows_10_22h2:-:*:*:*:*:*:x64:*",
            successor.cpe_name,
        )
        self.assertEqual(
            UUID("A09335E2-B42F-4820-B487-57A4BF0CEE98"),
            successor.cpe_name_id,
        )
pontos-25.3.2/tests/nvd/models/test_cpe_match.py000066400000000000000000000077041476255566300217300ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# pylint: disable=line-too-long
# ruff: noqa: E501

import unittest
from datetime import datetime, timezone
from uuid import UUID

from pontos.nvd.models.cpe_match_string import CPEMatch, CPEMatchString
from tests.nvd import get_cpe_match_data


class CPEMatchTestCase(unittest.TestCase):
    """Tests for parsing ``CPEMatchString`` records from NVD API data."""

    def test_required_only(self):
        """
        Test the required attributes of a CPEMatchString
        """
        data = get_cpe_match_data()
        # Remove the optional keys from the fixture so that only the
        # required attributes remain. Use the idiomatic ``del`` statement
        # instead of calling ``__delitem__`` directly.
        del data["matches"]
        del data["version_end_including"]
        del data["cpe_last_modified"]

        cpe_match_string = CPEMatchString.from_dict(data)

        self.assertEqual(
            UUID("EAB2C9C2-F685-450B-9980-553966FC3B63"),
            cpe_match_string.match_criteria_id,
        )
        self.assertEqual(
            "cpe:2.3:a:sun:jre:*:update3:*:*:*:*:*:*",
            cpe_match_string.criteria,
        )
        self.assertEqual(
            "Active",
            cpe_match_string.status,
        )
        self.assertEqual(
            datetime(2019, 6, 17, 9, 16, 33, 960000, tzinfo=timezone.utc),
            cpe_match_string.created,
        )
        self.assertEqual(
            datetime(2019, 6, 17, 9, 16, 44, 0, tzinfo=timezone.utc),
            cpe_match_string.last_modified,
        )

        # no matches key -> empty list, all optional fields unset
        self.assertEqual([], cpe_match_string.matches)

        self.assertIsNone(cpe_match_string.cpe_last_modified)
        self.assertIsNone(cpe_match_string.version_start_excluding)
        self.assertIsNone(cpe_match_string.version_end_excluding)
        self.assertIsNone(cpe_match_string.version_start_including)
        self.assertIsNone(cpe_match_string.version_end_including)

    def test_cpe_last_modified(self):
        """
        Test that the optional cpe_last_modified timestamp is parsed
        when present in the data
        """
        data = get_cpe_match_data()
        cpe_match_string = CPEMatchString.from_dict(data)

        self.assertEqual(
            datetime(2019, 7, 22, 16, 37, 38, 133000, tzinfo=timezone.utc),
            cpe_match_string.cpe_last_modified,
        )

    def test_matches(self):
        """
        Test the matches list of a CPEMatchString
        """
        cpe_match_string = CPEMatchString.from_dict(get_cpe_match_data())

        self.assertEqual(5, len(cpe_match_string.matches))

        # spot-check the first and last entries of the fixture's matches
        self.assertEqual(
            CPEMatch(
                "cpe:2.3:a:sun:jre:1.3.0:update3:*:*:*:*:*:*",
                UUID("2d284534-da21-43d5-9d89-07f19ae400ea"),
            ),
            cpe_match_string.matches[0],
        )

        self.assertEqual(
            CPEMatch(
                "cpe:2.3:a:sun:jre:1.6.0:update3:*:*:*:*:*:*",
                UUID("c518a954-369e-453e-8e17-2af639150115"),
            ),
            cpe_match_string.matches[-1],
        )

    def test_including_version_limits(self):
        """
        Test the including version limits of a CPEMatchString
        """
        data = get_cpe_match_data({"version_start_including": "1.3.0"})
        cpe_match_string = CPEMatchString.from_dict(data)

        self.assertEqual("1.3.0", cpe_match_string.version_start_including)
        self.assertEqual("1.6.0", cpe_match_string.version_end_including)
        self.assertIsNone(cpe_match_string.version_start_excluding)
        self.assertIsNone(cpe_match_string.version_end_excluding)

    def test_excluding_version_limits(self):
        """
        Test the excluding version limits of a CPEMatchString
        """
        data = get_cpe_match_data(
            {
                "version_start_excluding": "1.2.0",
                "version_end_excluding": "1.7.0",
            }
        )
        # the fixture sets an including end version by default; drop it so
        # only the excluding limits remain
        del data["version_end_including"]
        cpe_match_string = CPEMatchString.from_dict(data)

        self.assertEqual("1.2.0", cpe_match_string.version_start_excluding)
        self.assertEqual("1.7.0", cpe_match_string.version_end_excluding)
        self.assertIsNone(cpe_match_string.version_start_including)
        self.assertIsNone(cpe_match_string.version_end_including)
pontos-25.3.2/tests/nvd/models/test_cve.py000066400000000000000000000615301476255566300205570ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=line-too-long
# ruff: noqa: E501

import unittest
from datetime import date, datetime, timezone

from pontos.nvd.models import cvss_v2, cvss_v3
from pontos.nvd.models.cve import CVE, CVSSType, Operator
from tests.nvd import get_cve_data


class CVETestCase(unittest.TestCase):
    def test_required_only(self):
        """A minimal CVE record parses with all optional fields empty or
        None."""
        record = CVE.from_dict(get_cve_data())

        self.assertEqual("CVE-2022-45536", record.id)
        self.assertEqual("cve@mitre.org", record.source_identifier)
        self.assertEqual(
            datetime(2022, 11, 22, 21, 15, 11, 103000, tzinfo=timezone.utc),
            record.published,
        )
        self.assertEqual(
            datetime(2022, 11, 23, 16, 2, 7, 367000, tzinfo=timezone.utc),
            record.last_modified,
        )
        self.assertEqual(1, len(record.descriptions))
        self.assertEqual(2, len(record.references))
        self.assertEqual(0, len(record.weaknesses))
        self.assertEqual(0, len(record.configurations))
        self.assertEqual(0, len(record.vendor_comments))
        # every optional scalar attribute defaults to None
        for attribute in (
            record.metrics,
            record.evaluator_comment,
            record.evaluator_solution,
            record.evaluator_impact,
            record.cisa_exploit_add,
            record.cisa_action_due,
            record.cisa_required_action,
            record.cisa_vulnerability_name,
        ):
            self.assertIsNone(attribute)

    def test_descriptions(self):
        """The English description text is preserved verbatim."""
        record = CVE.from_dict(get_cve_data())

        first = record.descriptions[0]
        self.assertEqual("en", first.lang)
        self.assertEqual(
            "AeroCMS v0.0.1 was discovered to contain a SQL "
            "Injection vulnerability via the id parameter at "
            "\\admin\\post_comments.php. This vulnerability allows "
            "attackers to access database information.",
            first.value,
        )

    def test_references(self):
        """Both references keep their URL, source and tag list."""
        record = CVE.from_dict(get_cve_data())

        expected_urls = [
            "https://github.com/rdyx0/CVE/blob/master/AeroCMS/AeroCMS-v0.0.1-SQLi/post_comments_sql_injection/post_comments_sql_injection.md",
            "https://rdyx0.github.io/2018/09/07/AeroCMS-v0.0.1-SQLi%20post_comments_sql_injection/",
        ]
        # source and tags are identical for both fixture references
        for index, url in enumerate(expected_urls):
            reference = record.references[index]
            self.assertEqual(url, reference.url)
            self.assertEqual("cve@mitre.org", reference.source)
            self.assertEqual(
                ["Exploit", "Third Party Advisory"], reference.tags
            )

    def test_weaknesses(self):
        """A weakness entry keeps its source, type and description list."""
        record = CVE.from_dict(
            get_cve_data(
                {
                    "weaknesses": [
                        {
                            "source": "nvd@nist.gov",
                            "type": "Primary",
                            "description": [{"lang": "en", "value": "CWE-89"}],
                        }
                    ],
                }
            )
        )

        self.assertEqual(1, len(record.weaknesses))

        entry = record.weaknesses[0]
        self.assertEqual("nvd@nist.gov", entry.source)
        self.assertEqual("Primary", entry.type)
        self.assertEqual(1, len(entry.description))

        text = entry.description[0]
        self.assertEqual("en", text.lang)
        self.assertEqual("CWE-89", text.value)

    def test_configuration(self):
        """A configuration parses into nodes with CPE match criteria.

        The fixture has no top-level operator/negate, so both must be
        None; the single OR node carries one vulnerable CPE match
        without any version range limits.
        """
        cve = CVE.from_dict(
            get_cve_data(
                {
                    "configurations": [
                        {
                            "nodes": [
                                {
                                    "operator": "OR",
                                    "negate": False,
                                    "cpe_match": [
                                        {
                                            "vulnerable": True,
                                            "criteria": "cpe:2.3:a:aerocms_project:aerocms:0.0.1:*:*:*:*:*:*:*",
                                            "match_criteria_id": "52639E84-244D-4DA3-B1AE-6D8BA1C38863",
                                        }
                                    ],
                                }
                            ]
                        }
                    ],
                }
            )
        )

        self.assertEqual(len(cve.configurations), 1)

        # operator/negate were absent from the configuration dict
        configuration = cve.configurations[0]
        self.assertIsNone(configuration.operator)
        self.assertIsNone(configuration.negate)
        self.assertEqual(len(configuration.nodes), 1)

        node = configuration.nodes[0]
        self.assertFalse(node.negate)
        self.assertEqual(node.operator, Operator.OR)
        self.assertEqual(len(node.cpe_match), 1)

        cpe_match = node.cpe_match[0]
        self.assertTrue(cpe_match.vulnerable)
        self.assertEqual(
            cpe_match.criteria,
            "cpe:2.3:a:aerocms_project:aerocms:0.0.1:*:*:*:*:*:*:*",
        )
        self.assertEqual(
            cpe_match.match_criteria_id, "52639E84-244D-4DA3-B1AE-6D8BA1C38863"
        )
        # no version range keys in the input -> all limits are None
        self.assertIsNone(cpe_match.version_start_including)
        self.assertIsNone(cpe_match.version_start_excluding)
        self.assertIsNone(cpe_match.version_end_including)
        self.assertIsNone(cpe_match.version_end_excluding)

    def test_metrics_v2(self):
        """CVSS v2 metrics parse into cvss_metric_v2 with full vector
        data; the v3.0/v3.1 metric lists stay empty."""
        cve = CVE.from_dict(
            get_cve_data(
                {
                    "metrics": {
                        "cvss_metric_v2": [
                            {
                                "source": "nvd@nist.gov",
                                "type": "Primary",
                                "cvss_data": {
                                    "version": "2.0",
                                    "vector_string": "AV:N/AC:M/Au:N/C:N/I:P/A:N",
                                    "access_vector": "NETWORK",
                                    "access_complexity": "MEDIUM",
                                    "authentication": "NONE",
                                    "confidentiality_impact": "NONE",
                                    "integrity_impact": "PARTIAL",
                                    "availability_impact": "NONE",
                                    "base_score": 4.3,
                                    "base_severity": "MEDIUM",
                                },
                                "exploitability_score": 8.6,
                                "impact_score": 2.9,
                                "ac_insuf_info": False,
                                "obtain_all_privilege": False,
                                "obtain_user_privilege": False,
                                "obtain_other_privilege": False,
                                "user_interaction_required": True,
                            }
                        ]
                    }
                }
            )
        )

        self.assertEqual(len(cve.metrics.cvss_metric_v2), 1)
        self.assertEqual(len(cve.metrics.cvss_metric_v30), 0)
        self.assertEqual(len(cve.metrics.cvss_metric_v31), 0)

        # metric-level attributes
        cvss_metric = cve.metrics.cvss_metric_v2[0]
        self.assertEqual(cvss_metric.source, "nvd@nist.gov")
        self.assertEqual(cvss_metric.type, CVSSType.PRIMARY)
        self.assertEqual(cvss_metric.exploitability_score, 8.6)
        self.assertEqual(cvss_metric.impact_score, 2.9)
        self.assertFalse(cvss_metric.ac_insuf_info)
        self.assertFalse(cvss_metric.obtain_all_privilege)
        self.assertFalse(cvss_metric.obtain_user_privilege)
        self.assertFalse(cvss_metric.obtain_other_privilege)
        self.assertTrue(cvss_metric.user_interaction_required)

        # base vector fields map to the cvss_v2 enums
        cvss_data = cvss_metric.cvss_data
        self.assertEqual(cvss_data.version, "2.0")
        self.assertEqual(cvss_data.vector_string, "AV:N/AC:M/Au:N/C:N/I:P/A:N")
        self.assertEqual(cvss_data.base_score, 4.3)
        self.assertEqual(cvss_data.access_vector, cvss_v2.AccessVector.NETWORK)
        self.assertEqual(
            cvss_data.access_complexity, cvss_v2.AccessComplexity.MEDIUM
        )
        self.assertEqual(cvss_data.authentication, cvss_v2.Authentication.NONE)
        self.assertEqual(cvss_data.confidentiality_impact, cvss_v2.Impact.NONE)
        self.assertEqual(cvss_data.integrity_impact, cvss_v2.Impact.PARTIAL)
        self.assertEqual(cvss_data.availability_impact, cvss_v2.Impact.NONE)
        # temporal/environmental fields were absent -> None
        self.assertIsNone(cvss_data.exploitability)
        self.assertIsNone(cvss_data.remediation_level)
        self.assertIsNone(cvss_data.report_confidence)
        self.assertIsNone(cvss_data.temporal_score)
        self.assertIsNone(cvss_data.collateral_damage_potential)
        self.assertIsNone(cvss_data.target_distribution)
        self.assertIsNone(cvss_data.confidentiality_requirement)
        self.assertIsNone(cvss_data.integrity_requirement)
        self.assertIsNone(cvss_data.availability_requirement)
        self.assertIsNone(cvss_data.environmental_score)

    def test_metrics_v30(self):
        """CVSS v3.0 metrics parse into cvss_metric_v30 with full vector
        data; the v2/v3.1 metric lists stay empty."""
        cve = CVE.from_dict(
            get_cve_data(
                {
                    "metrics": {
                        "cvss_metric_v30": [
                            {
                                "source": "nvd@nist.gov",
                                "type": "Primary",
                                "cvss_data": {
                                    "version": "3.0",
                                    "vector_string": "CVSS:3.0/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N",
                                    "attack_vector": "NETWORK",
                                    "attack_complexity": "LOW",
                                    "privileges_required": "NONE",
                                    "user_interaction": "REQUIRED",
                                    "scope": "CHANGED",
                                    "confidentiality_impact": "LOW",
                                    "integrity_impact": "LOW",
                                    "availability_impact": "NONE",
                                    "base_score": 6.1,
                                    "base_severity": "MEDIUM",
                                },
                                "exploitability_score": 2.8,
                                "impact_score": 2.7,
                            }
                        ],
                    }
                }
            )
        )

        self.assertEqual(len(cve.metrics.cvss_metric_v2), 0)
        self.assertEqual(len(cve.metrics.cvss_metric_v30), 1)
        self.assertEqual(len(cve.metrics.cvss_metric_v31), 0)

        # metric-level attributes
        cvss_metric = cve.metrics.cvss_metric_v30[0]
        self.assertEqual(cvss_metric.source, "nvd@nist.gov")
        self.assertEqual(cvss_metric.type, CVSSType.PRIMARY)
        self.assertEqual(cvss_metric.exploitability_score, 2.8)
        self.assertEqual(cvss_metric.impact_score, 2.7)

        # base vector fields map to the cvss_v3 enums
        cvss_data = cvss_metric.cvss_data
        self.assertEqual(cvss_data.version, "3.0")
        self.assertEqual(
            cvss_data.vector_string,
            "CVSS:3.0/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N",
        )
        self.assertEqual(cvss_data.base_score, 6.1)
        self.assertEqual(cvss_data.base_severity, cvss_v3.Severity.MEDIUM)
        self.assertEqual(cvss_data.attack_vector, cvss_v3.AttackVector.NETWORK)
        self.assertEqual(
            cvss_data.attack_complexity, cvss_v3.AttackComplexity.LOW
        )
        self.assertEqual(
            cvss_data.privileges_required, cvss_v3.PrivilegesRequired.NONE
        )
        self.assertEqual(
            cvss_data.user_interaction, cvss_v3.UserInteraction.REQUIRED
        )
        self.assertEqual(cvss_data.scope, cvss_v3.Scope.CHANGED)
        self.assertEqual(cvss_data.confidentiality_impact, cvss_v3.Impact.LOW)
        self.assertEqual(cvss_data.integrity_impact, cvss_v3.Impact.LOW)
        self.assertEqual(cvss_data.availability_impact, cvss_v3.Impact.NONE)
        # temporal/environmental fields were absent -> None
        self.assertIsNone(cvss_data.exploit_code_maturity)
        self.assertIsNone(cvss_data.remediation_level)
        self.assertIsNone(cvss_data.report_confidence)
        self.assertIsNone(cvss_data.temporal_score)
        self.assertIsNone(cvss_data.temporal_severity)
        self.assertIsNone(cvss_data.confidentiality_requirement)
        self.assertIsNone(cvss_data.integrity_requirement)
        self.assertIsNone(cvss_data.availability_requirement)
        self.assertIsNone(cvss_data.modified_attack_vector)
        self.assertIsNone(cvss_data.modified_attack_complexity)
        self.assertIsNone(cvss_data.modified_privileges_required)
        self.assertIsNone(cvss_data.modified_user_interaction)
        self.assertIsNone(cvss_data.modified_scope)
        self.assertIsNone(cvss_data.modified_confidentiality_impact)
        self.assertIsNone(cvss_data.modified_integrity_impact)
        self.assertIsNone(cvss_data.modified_availability_impact)
        self.assertIsNone(cvss_data.environmental_score)
        self.assertIsNone(cvss_data.environmental_severity)

    def test_metrics_v31(self):
        """A primary CVSS v3.1 metric entry is parsed into the model."""
        vector = "CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:H/I:N/A:N"
        metric_dict = {
            "source": "nvd@nist.gov",
            "type": "Primary",
            "cvss_data": {
                "version": "3.1",
                "vector_string": vector,
                "attack_vector": "NETWORK",
                "attack_complexity": "LOW",
                "privileges_required": "HIGH",
                "user_interaction": "NONE",
                "scope": "UNCHANGED",
                "confidentiality_impact": "HIGH",
                "integrity_impact": "NONE",
                "availability_impact": "NONE",
                "base_score": 4.9,
                "base_severity": "MEDIUM",
            },
            "exploitability_score": 1.2,
            "impact_score": 3.6,
        }
        cve = CVE.from_dict(
            get_cve_data({"metrics": {"cvss_metric_v31": [metric_dict]}})
        )

        # only a single v3.1 metric should be present
        self.assertEqual(len(cve.metrics.cvss_metric_v2), 0)
        self.assertEqual(len(cve.metrics.cvss_metric_v30), 0)
        self.assertEqual(len(cve.metrics.cvss_metric_v31), 1)

        metric = cve.metrics.cvss_metric_v31[0]
        self.assertEqual(metric.source, "nvd@nist.gov")
        self.assertEqual(metric.type, CVSSType.PRIMARY)
        self.assertEqual(metric.exploitability_score, 1.2)
        self.assertEqual(metric.impact_score, 3.6)

        data = metric.cvss_data
        self.assertEqual(data.version, "3.1")
        self.assertEqual(data.vector_string, vector)
        self.assertEqual(data.base_score, 4.9)
        self.assertEqual(data.base_severity, cvss_v3.Severity.MEDIUM)
        self.assertEqual(data.attack_vector, cvss_v3.AttackVector.NETWORK)
        self.assertEqual(data.attack_complexity, cvss_v3.AttackComplexity.LOW)
        self.assertEqual(
            data.privileges_required, cvss_v3.PrivilegesRequired.HIGH
        )
        self.assertEqual(data.user_interaction, cvss_v3.UserInteraction.NONE)
        self.assertEqual(data.scope, cvss_v3.Scope.UNCHANGED)
        self.assertEqual(data.confidentiality_impact, cvss_v3.Impact.HIGH)
        self.assertEqual(data.integrity_impact, cvss_v3.Impact.NONE)
        self.assertEqual(data.availability_impact, cvss_v3.Impact.NONE)

        # temporal and environmental fields were not supplied in the input
        for field in (
            "exploit_code_maturity",
            "remediation_level",
            "report_confidence",
            "temporal_score",
            "temporal_severity",
            "confidentiality_requirement",
            "integrity_requirement",
            "availability_requirement",
            "modified_attack_vector",
            "modified_attack_complexity",
            "modified_privileges_required",
            "modified_user_interaction",
            "modified_scope",
            "modified_confidentiality_impact",
            "modified_integrity_impact",
            "modified_availability_impact",
            "environmental_score",
            "environmental_severity",
        ):
            self.assertIsNone(getattr(data, field))

    def test_metrics_v31_severity_none(self):
        """A secondary CVSS v3.1 metric with severity NONE parses correctly."""
        vector = "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:U/C:N/I:N/A:N"
        metric_dict = {
            "source": "nvd@nist.gov",
            "type": "Secondary",
            "cvss_data": {
                "version": "3.1",
                "vector_string": vector,
                "attack_vector": "NETWORK",
                "attack_complexity": "LOW",
                "privileges_required": "NONE",
                "user_interaction": "REQUIRED",
                "scope": "UNCHANGED",
                "confidentiality_impact": "NONE",
                "integrity_impact": "NONE",
                "availability_impact": "NONE",
                "base_score": 0.0,
                "base_severity": "NONE",
            },
            "exploitability_score": 2.8,
            "impact_score": 0.0,
        }
        cve = CVE.from_dict(
            get_cve_data({"metrics": {"cvss_metric_v31": [metric_dict]}})
        )

        # only a single v3.1 metric should be present
        self.assertEqual(len(cve.metrics.cvss_metric_v2), 0)
        self.assertEqual(len(cve.metrics.cvss_metric_v30), 0)
        self.assertEqual(len(cve.metrics.cvss_metric_v31), 1)

        metric = cve.metrics.cvss_metric_v31[0]
        self.assertEqual(metric.source, "nvd@nist.gov")
        self.assertEqual(metric.type, CVSSType.SECONDARY)
        self.assertEqual(metric.exploitability_score, 2.8)
        self.assertEqual(metric.impact_score, 0.0)

        data = metric.cvss_data
        self.assertEqual(data.version, "3.1")
        self.assertEqual(data.vector_string, vector)
        self.assertEqual(data.base_score, 0.0)
        self.assertEqual(data.base_severity, cvss_v3.Severity.NONE)
        self.assertEqual(data.attack_vector, cvss_v3.AttackVector.NETWORK)
        self.assertEqual(data.attack_complexity, cvss_v3.AttackComplexity.LOW)
        self.assertEqual(
            data.privileges_required, cvss_v3.PrivilegesRequired.NONE
        )
        self.assertEqual(
            data.user_interaction, cvss_v3.UserInteraction.REQUIRED
        )
        self.assertEqual(data.scope, cvss_v3.Scope.UNCHANGED)
        self.assertEqual(data.confidentiality_impact, cvss_v3.Impact.NONE)
        self.assertEqual(data.integrity_impact, cvss_v3.Impact.NONE)
        self.assertEqual(data.availability_impact, cvss_v3.Impact.NONE)

        # temporal and environmental fields were not supplied in the input
        for field in (
            "exploit_code_maturity",
            "remediation_level",
            "report_confidence",
            "temporal_score",
            "temporal_severity",
            "confidentiality_requirement",
            "integrity_requirement",
            "availability_requirement",
            "modified_attack_vector",
            "modified_attack_complexity",
            "modified_privileges_required",
            "modified_user_interaction",
            "modified_scope",
            "modified_confidentiality_impact",
            "modified_integrity_impact",
            "modified_availability_impact",
            "environmental_score",
            "environmental_severity",
        ):
            self.assertIsNone(getattr(data, field))

    def test_vendor_comments(self):
        """A vendor comment is parsed with organization, text and timestamp."""
        comment_text = (
            "Fixed in Apache HTTP Server 1.3.12:\n"
            "http://httpd.apache.org/security/vulnerabilities_13.html"
        )
        cve = CVE.from_dict(
            get_cve_data(
                {
                    "vendor_comments": [
                        {
                            "organization": "Apache",
                            "comment": comment_text,
                            "last_modified": "2008-07-02T00:00:00",
                        }
                    ],
                }
            )
        )

        self.assertEqual(len(cve.vendor_comments), 1)

        vendor_comment = cve.vendor_comments[0]
        self.assertEqual(vendor_comment.organization, "Apache")
        self.assertEqual(vendor_comment.comment, comment_text)
        self.assertEqual(
            vendor_comment.last_modified,
            datetime(2008, 7, 2, tzinfo=timezone.utc),
        )

    def test_evaluator_comment(self):
        """The evaluator_comment field is passed through verbatim."""
        comment_text = (
            "Please see the following link for more information:\r\n\r\n"
            "http://seclists.org/bugtraq/1999/Jan/0215.html"
        )
        cve = CVE.from_dict(get_cve_data({"evaluator_comment": comment_text}))

        self.assertEqual(cve.evaluator_comment, comment_text)

    def test_evaluator_solution(self):
        """The evaluator_solution field is passed through verbatim."""
        solution_text = (
            "This problem was fixed in Linux kernel 2.2.4 and later releases."
        )
        cve = CVE.from_dict(
            get_cve_data({"evaluator_solution": solution_text})
        )

        self.assertEqual(cve.evaluator_solution, solution_text)

    def test_evaluator_impact(self):
        """The evaluator_impact free-text field is passed through verbatim."""
        impact_text = "This Common Vulnerabilities and Exposures (CVE) entry is a configuration issue and not a software flaw. As such, it doesn’t fit in the CVE software flaw list. The Common Vulnerability Scoring System (CVSS) base score for this CVE entry has been set to 0 because this CVE entry has no impact as a software flaw according to CVSS. This does not mean that the configuration issue is not important and there may be security implications relative to computers having this configuration."
        cve = CVE.from_dict(get_cve_data({"evaluator_impact": impact_text}))

        self.assertEqual(cve.evaluator_impact, impact_text)

    def test_cisa(self):
        """CISA fields are parsed into dates and pass-through strings."""
        cve = CVE.from_dict(
            get_cve_data(
                {
                    "cisa_exploit_add": "2022-03-03",
                    "cisa_action_due": "2022-03-24",
                    "cisa_required_action": "Apply updates per vendor instructions.",
                    "cisa_vulnerability_name": "Microsoft Windows Privilege Escalation Vulnerability",
                }
            )
        )

        # date strings become datetime.date values
        self.assertEqual(cve.cisa_exploit_add, date(2022, 3, 3))
        self.assertEqual(cve.cisa_action_due, date(2022, 3, 24))
        self.assertEqual(
            cve.cisa_required_action, "Apply updates per vendor instructions."
        )
        self.assertEqual(
            cve.cisa_vulnerability_name,
            "Microsoft Windows Privilege Escalation Vulnerability",
        )
pontos-25.3.2/tests/nvd/models/test_cve_change.py000066400000000000000000000040161476255566300220600ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

# pylint: disable=line-too-long
# ruff: noqa: E501

import unittest
from datetime import datetime, timezone
from uuid import UUID

from pontos.nvd.models.cve_change import CVEChange, Detail, EventName
from tests.nvd import get_cve_change_data


class CVEChangeTestCase(unittest.TestCase):
    """Tests deserialization of CVE change-history records."""

    def test_required_only(self):
        """A change record with only required fields is fully parsed."""
        change = CVEChange.from_dict(get_cve_change_data())

        self.assertEqual(change.cve_id, "CVE-2022-0001")
        self.assertEqual(change.event_name, "Initial Analysis")
        self.assertEqual(
            change.cve_change_id,
            UUID("5160FDEB-0FF0-457B-AA36-0AEDCAB2522E"),
        )
        self.assertEqual(change.source_identifier, "nvd@nist.gov")
        self.assertEqual(
            change.created,
            datetime(2022, 3, 18, 20, 13, 8, 123000, tzinfo=timezone.utc),
        )

        expected_details = [
            Detail(
                action="Added",
                type="CVSS V2",
                new_value="NIST (AV:L/AC:L/Au:N/C:P/I:N/A:N)",
            ),
            Detail(
                action="Added",
                type="CVSS V3.1",
                new_value="NIST AV:L/AC:L/PR:L/UI:N/S:C/C:H/I:N/A:N",
            ),
            Detail(
                action="Changed",
                type="Reference Type",
                old_value="http://www.openwall.com/lists/oss-security/2022/03/18/2 No Types Assigned",
                new_value="http://www.openwall.com/lists/oss-security/2022/03/18/2 Mailing List, Third Party Advisory",
            ),
        ]
        self.assertEqual(change.details, expected_details)


class EventNameTestCase(unittest.TestCase):
    """Tests for the EventName enum."""

    def test_init(self):
        """Constructing from the human-readable value yields the member."""
        member = EventName("Initial Analysis")
        self.assertEqual(member, EventName.INITIAL_ANALYSIS)

    def test__str__(self):
        """str() of a member returns its human-readable value."""
        self.assertEqual(f"{EventName.INITIAL_ANALYSIS}", "Initial Analysis")
pontos-25.3.2/tests/nvd/source/000077500000000000000000000000001476255566300164015ustar00rootroot00000000000000pontos-25.3.2/tests/nvd/source/__init__.py000066400000000000000000000001321476255566300205060ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
pontos-25.3.2/tests/nvd/source/test_api.py000066400000000000000000000140161476255566300205650ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later


from datetime import datetime, timezone
from typing import Any, Optional
from unittest.mock import MagicMock, patch

from httpx import AsyncClient, Response

from pontos.nvd.models.source import AcceptanceLevel
from pontos.nvd.source.api import MAX_SOURCES_PER_PAGE, SourceApi
from tests import AsyncMock, IsolatedAsyncioTestCase, aiter, anext
from tests.nvd import get_source_data


def create_source_response(
    name: str, update: Optional[dict[str, Any]] = None
) -> MagicMock:
    """Build a mocked HTTP response carrying a single source record.

    Keys in *update* override the default payload fields.
    """
    payload: dict[str, Any] = {
        "sources": [get_source_data({"name": name})],
        "results_per_page": 1,
        **(update or {}),
    }
    mock_response = MagicMock(spec=Response)
    mock_response.json.return_value = payload
    return mock_response


def create_source_responses(count: int = 2) -> list[MagicMock]:
    """Create *count* mocked responses named MITRE-1 .. MITRE-<count>."""
    responses = []
    for index in range(1, count + 1):
        responses.append(
            create_source_response(f"MITRE-{index}", {"total_results": count})
        )
    return responses


class SourceAPITestCase(IsolatedAsyncioTestCase):
    """Tests for the NVD ``SourceApi`` pagination and request parameters.

    The HTTP layer is fully mocked, so the assertions below pin the exact
    URL, headers and query parameters sent to the NVD source endpoint.
    """

    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    def setUp(self, async_client: MagicMock) -> None:
        """Replace the real HTTP client with a mock before each test."""
        self.http_client = AsyncMock()
        async_client.return_value = self.http_client
        self.api = SourceApi(token="token")

    async def test_sources(self):
        """Iterating sources() pages through results and parses all fields."""
        self.http_client.get.side_effect = create_source_responses()

        it = aiter(self.api.sources())
        source = await anext(it)

        # field-by-field check of the first parsed source record
        self.assertEqual(source.name, "MITRE-1")
        self.assertEqual(
            source.contact_email,
            "cve@mitre.org",
        )
        self.assertEqual(
            source.source_identifiers,
            [
                "cve@mitre.org",
                "8254265b-2729-46b6-b9e3-3dfca2d5bfca",
            ],
        )
        self.assertEqual(
            source.last_modified,
            datetime(2019, 9, 9, 16, 18, 45, 930000, tzinfo=timezone.utc),
        )
        self.assertEqual(
            source.created,
            datetime(2019, 9, 9, 16, 18, 45, 930000, tzinfo=timezone.utc),
        )
        self.assertIsNone(source.v2_acceptance_level)
        self.assertEqual(
            source.v3_acceptance_level,
            AcceptanceLevel(
                "Contributor",
                datetime(2025, 1, 30, 0, 0, 20, 107000, tzinfo=timezone.utc),
            ),
        )
        self.assertEqual(
            source.cwe_acceptance_level,
            AcceptanceLevel(
                "Reference",
                datetime(2025, 1, 24, 0, 0, 0, 43000, tzinfo=timezone.utc),
            ),
        )

        # the first page is requested from index 0 with the maximum page size
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/source/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "resultsPerPage": MAX_SOURCES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        # advancing past the first page triggers a second request at index 1
        source = await anext(it)
        self.assertEqual(source.name, "MITRE-2")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/source/2.0",
            headers={"apiKey": "token"},
            params={"startIndex": 1, "resultsPerPage": 1},
        )

        # both mocked records consumed -> iteration ends
        with self.assertRaises(StopAsyncIteration):
            await anext(it)

    async def test_sources_change_dates(self):
        """Modification-date filters are forwarded as ISO query parameters."""
        self.http_client.get.side_effect = create_source_responses()

        it = aiter(
            self.api.sources(
                last_modified_start_date=datetime(2025, 1, 1),
                last_modified_end_date=datetime(2025, 1, 31),
            )
        )
        source = await anext(it)

        self.assertEqual(source.name, "MITRE-1")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/source/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "lastModStartDate": "2025-01-01T00:00:00",
                "lastModEndDate": "2025-01-31T00:00:00",
                "resultsPerPage": MAX_SOURCES_PER_PAGE,
            },
        )

        self.http_client.get.reset_mock()

        source = await anext(it)

        # the date filters must be repeated on follow-up page requests
        self.assertEqual(source.name, "MITRE-2")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/source/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "lastModStartDate": "2025-01-01T00:00:00",
                "lastModEndDate": "2025-01-31T00:00:00",
                "resultsPerPage": 1,
            },
        )

        with self.assertRaises(StopAsyncIteration):
            source = await anext(it)

    async def test_sources_source_identifier(self):
        """The source_identifier filter is forwarded on every page request."""
        self.http_client.get.side_effect = create_source_responses()

        it = aiter(self.api.sources(source_identifier="cve@mitre.org"))
        source = await anext(it)
        self.assertEqual(source.name, "MITRE-1")

        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/source/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 0,
                "resultsPerPage": MAX_SOURCES_PER_PAGE,
                "sourceIdentifier": "cve@mitre.org",
            },
        )

        self.http_client.get.reset_mock()

        source = await anext(it)
        self.assertEqual(source.name, "MITRE-2")
        self.http_client.get.assert_awaited_once_with(
            "https://services.nvd.nist.gov/rest/json/source/2.0",
            headers={"apiKey": "token"},
            params={
                "startIndex": 1,
                "resultsPerPage": 1,
                "sourceIdentifier": "cve@mitre.org",
            },
        )

        with self.assertRaises(StopAsyncIteration):
            await anext(it)

    async def test_context_manager(self):
        """Using the API as a context manager delegates to the HTTP client."""
        async with self.api:
            pass

        self.http_client.__aenter__.assert_awaited_once()
        self.http_client.__aexit__.assert_awaited_once()
pontos-25.3.2/tests/nvd/test_api.py000066400000000000000000000425631476255566300172750ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=protected-access

import unittest
from datetime import datetime
from typing import Any, Iterator
from unittest.mock import AsyncMock, MagicMock, call, patch

from httpx import AsyncClient, Response

from pontos.nvd.api import (
    JSON,
    InvalidState,
    NoMoreResults,
    NVDApi,
    NVDResults,
    convert_camel_case,
    format_date,
)
from tests import IsolatedAsyncioTestCase, aiter, anext


class ConvertCamelCaseTestCase(unittest.TestCase):
    """Tests for convert_camel_case dict-key conversion."""

    def test_convert(self):
        """camelCase keys become snake_case; values are untouched."""
        converted = convert_camel_case({"someValue": 123, "otherValue": "bar"})

        self.assertEqual(converted["some_value"], 123)
        self.assertEqual(converted["other_value"], "bar")


class FormatDateTestCase(unittest.TestCase):
    """Tests for format_date."""

    def test_format_date(self):
        """Datetimes are rendered as second-precision ISO strings."""
        formatted = format_date(datetime(2022, 12, 10, 10, 0, 12, 123))

        self.assertEqual(formatted, "2022-12-10T10:00:12")


class NVDApiTestCase(IsolatedAsyncioTestCase):
    """Tests for the low-level ``NVDApi`` HTTP wrapper.

    Covers context-manager delegation, API-token headers, the request
    rate limit and server-error retry behavior, all against mocked
    httpx clients (no network I/O).
    """

    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    async def test_context_manager(self, async_client: MagicMock):
        """Entering/exiting the API delegates to the underlying client."""
        http_client = AsyncMock()
        async_client.return_value = http_client
        api = NVDApi("https://foo.bar/baz", token="token")

        async with api:
            pass

        http_client.__aenter__.assert_awaited_once()
        http_client.__aexit__.assert_awaited_once()

    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    async def test_get_without_token(self, async_client: MagicMock):
        """Without a token no apiKey header is sent."""
        http_client = AsyncMock()
        async_client.return_value = http_client
        api = NVDApi("https://foo.bar/baz")

        await api._get()

        http_client.get.assert_awaited_once_with(
            "https://foo.bar/baz", headers={}, params=None
        )

    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    async def test_get_with_token(self, async_client: MagicMock):
        """A configured token is passed as the apiKey header."""
        http_client = AsyncMock()
        async_client.return_value = http_client
        api = NVDApi("https://foo.bar/baz", token="token")

        await api._get()

        http_client.get.assert_awaited_once_with(
            "https://foo.bar/baz", headers={"apiKey": "token"}, params=None
        )

    @patch("pontos.nvd.api.time.monotonic", autospec=True)
    @patch("pontos.nvd.api.asyncio.sleep", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    async def test_rate_limit(
        self,
        async_client: MagicMock,
        sleep_mock: MagicMock,
        monotonic_mock: MagicMock,
    ):
        """The sixth request within the measured window triggers a sleep.

        The monotonic clock is mocked so the elapsed-time calculation is
        deterministic; the expected sleep of 20.0 seconds follows from the
        mocked timestamps (0.0 start, 10.0 at the limit check).
        """
        http_client = AsyncMock()
        async_client.return_value = http_client
        monotonic_mock.side_effect = [0.0, 10.0, 11.0]

        api = NVDApi("https://foo.bar/baz")

        await api._get()
        await api._get()
        await api._get()
        await api._get()
        await api._get()

        # first five requests go through without throttling
        sleep_mock.assert_not_called()

        await api._get()

        sleep_mock.assert_called_once_with(20.0)

    @patch("pontos.nvd.api.asyncio.sleep", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    async def test_no_rate_limit(
        self, async_client: MagicMock, sleep_mock: MagicMock
    ):
        """With rate_limit=False no throttling sleep ever happens."""
        http_client = AsyncMock()
        async_client.return_value = http_client
        api = NVDApi("https://foo.bar/baz", rate_limit=False)

        await api._get()
        await api._get()
        await api._get()
        await api._get()
        await api._get()

        sleep_mock.assert_not_called()

        await api._get()

        sleep_mock.assert_not_called()

    @patch("pontos.nvd.api.asyncio.sleep", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    async def test_retry(
        self,
        async_client: MagicMock,
        sleep_mock: MagicMock,
    ):
        """Server errors are retried with exponential backoff (2s, 4s, 8s)."""
        # three failing responses followed by a success
        response_mocks = [
            MagicMock(spec=Response, is_server_error=True),
            MagicMock(spec=Response, is_server_error=True),
            MagicMock(spec=Response, is_server_error=True),
            MagicMock(spec=Response, is_server_error=False),
        ]
        http_client = AsyncMock()
        http_client.get.side_effect = response_mocks
        async_client.return_value = http_client

        api = NVDApi("https://foo.bar/baz", request_attempts=4)

        result = await api._get()

        calls = [call(2.0), call(4.0), call(8.0)]
        sleep_mock.assert_has_calls(calls)
        self.assertFalse(result.is_server_error)

    @patch("pontos.nvd.api.asyncio.sleep", autospec=True)
    @patch("pontos.nvd.api.AsyncClient", spec=AsyncClient)
    async def test_no_retry(
        self,
        async_client: MagicMock,
        sleep_mock: MagicMock,
    ):
        """A successful first response is returned without any backoff."""
        response_mock = MagicMock(spec=Response)
        response_mock.is_server_error = False

        http_client = AsyncMock()
        http_client.get.return_value = response_mock
        async_client.return_value = http_client

        api = NVDApi("https://foo.bar/baz")

        result = await api._get()

        sleep_mock.assert_not_called()
        self.assertFalse(result.is_server_error)


class Result:
    """Minimal value wrapper used as the model type in NVDResults tests.

    Args:
        value: the integer payload carried by this result.
    """

    def __init__(self, value: int) -> None:
        self.value = value

    def __repr__(self) -> str:
        # a readable repr makes failing assertions on Result objects debuggable
        return f"{type(self).__name__}(value={self.value!r})"


def result_func(data: JSON) -> Iterator[Result]:
    """Wrap every entry of ``data["values"]`` in a ``Result``."""
    return map(Result, data["values"])  # type: ignore


class NVDResultsTestCase(IsolatedAsyncioTestCase):
    async def test_items(self):
        """items() yields every result across both mocked pages in order."""
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3],
                "total_results": 6,
                "results_per_page": 3,
            },
            {
                "values": [4, 5, 6],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        results: NVDResults[Result] = NVDResults(api_mock, {}, result_func)

        it = aiter(results.items())
        for expected in range(1, 7):
            result = await anext(it)
            self.assertEqual(result.value, expected)

        # all six results consumed -> iteration ends
        with self.assertRaises(StopAsyncIteration):
            await anext(it)

    async def test_items_no_results(self):
        """items() is immediately exhausted when the API returns nothing."""
        empty_page = {
            "values": [],
            "total_results": 0,
            "results_per_page": 0,
        }
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [empty_page]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        results: NVDResults[Result] = NVDResults(api_mock, {}, result_func)

        with self.assertRaises(StopAsyncIteration):
            await anext(aiter(results.items()))

    async def test_aiter(self):
        """NVDResults itself async-iterates over all results in order."""
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3],
                "total_results": 6,
                "results_per_page": 3,
            },
            {
                "values": [4, 5, 6],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        results: NVDResults[Result] = NVDResults(api_mock, {}, result_func)

        it = aiter(results)
        for expected in (1, 2, 3, 4, 5, 6):
            result = await anext(it)
            self.assertEqual(result.value, expected)

        # all six results consumed -> iteration ends
        with self.assertRaises(StopAsyncIteration):
            await anext(it)

    async def test_len(self):
        """len() raises before awaiting and reports the total afterwards."""
        response_mock = MagicMock(spec=Response)
        response_mock.json.return_value = {
            "values": [1, 2, 3],
            "total_results": 3,
            "results_per_page": 3,
        }
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        results: NVDResults[Result] = NVDResults(api_mock, {}, result_func)

        # length is unknown until the first request has been awaited
        with self.assertRaisesRegex(
            InvalidState, "NVDResults has not been awaited yet"
        ):
            len(results)

        await results

        self.assertEqual(len(results), 3)

    async def test_chunks(self):
        """chunks() yields one list of results per mocked response page."""
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3],
                "total_results": 6,
                "results_per_page": 3,
            },
            {
                "values": [4, 5, 6],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        nvd_results: NVDResults[Result] = NVDResults(api_mock, {}, result_func)

        it = aiter(nvd_results.chunks())
        for expected in ([1, 2, 3], [4, 5, 6]):
            chunk = await anext(it)
            self.assertEqual([item.value for item in chunk], expected)

        with self.assertRaises(StopAsyncIteration):
            await anext(it)

    async def test_json(self):
        """json() returns each raw response page in turn, then None."""
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3],
                "total_results": 6,
                "results_per_page": 3,
            },
            {
                "values": [4, 5, 6],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        nvd_results: NVDResults[Result] = NVDResults(api_mock, {}, result_func)

        for expected_values in ([1, 2, 3], [4, 5, 6]):
            json: dict[str, Any] = await nvd_results.json()  # type: ignore
            self.assertEqual(json["values"], expected_values)
            self.assertEqual(json["total_results"], 6)
            self.assertEqual(json["results_per_page"], 3)

        # once both pages are fetched json() signals exhaustion with None
        self.assertIsNone(await nvd_results.json())

    async def test_await(self):
        """Awaiting fetches the next page; exhaustion raises NoMoreResults."""
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3],
                "total_results": 6,
                "results_per_page": 3,
            },
            {
                "values": [4, 5, 6],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        nvd_results: NVDResults[Result] = NVDResults(api_mock, {}, result_func)

        await nvd_results
        self.assertEqual(len(nvd_results), 6)

        first_page: dict[str, Any] = await nvd_results.json()  # type: ignore
        self.assertEqual(first_page["values"], [1, 2, 3])
        self.assertEqual(first_page["total_results"], 6)
        self.assertEqual(first_page["results_per_page"], 3)

        await nvd_results
        second_page: dict[str, Any] = await nvd_results.json()  # type: ignore
        self.assertEqual(second_page["values"], [4, 5, 6])
        self.assertEqual(second_page["total_results"], 6)
        self.assertEqual(second_page["results_per_page"], 3)

        # a third await has no page left to fetch
        with self.assertRaises(NoMoreResults):
            await nvd_results

    async def test_mix_and_match(self):
        """``await``, ``len()``, ``json()`` and async iteration mix freely.

        Iterating consumes all remaining results while ``json()`` still
        returns the second raw page afterwards.
        """
        response_mock = MagicMock(spec=Response)
        # two pages of three results each
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3],
                "total_results": 6,
                "results_per_page": 3,
            },
            {
                "values": [4, 5, 6],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        nvd_results: NVDResults[Result] = NVDResults(
            api_mock,
            {},
            result_func,
        )

        await nvd_results
        self.assertEqual(len(nvd_results), 6)

        json: dict[str, Any] = await nvd_results.json()  # type: ignore
        self.assertEqual(json["values"], [1, 2, 3])
        self.assertEqual(json["total_results"], 6)
        self.assertEqual(json["results_per_page"], 3)

        # async iteration yields results from both pages
        self.assertEqual(
            [result.value async for result in nvd_results], [1, 2, 3, 4, 5, 6]
        )

        json: dict[str, Any] = await nvd_results.json()  # type: ignore
        self.assertEqual(json["values"], [4, 5, 6])
        self.assertEqual(json["total_results"], 6)
        self.assertEqual(json["results_per_page"], 3)

    async def test_response_error(self):
        """A failed request must not advance the result cursor.

        After a server error the same page is requested again on the
        next ``json()`` call, so no results are skipped.
        """
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        nvd_results: NVDResults[Result] = NVDResults(
            api_mock,
            {},
            result_func,
        )

        json = await nvd_results.json()
        self.assertEqual(json["values"], [1, 2, 3])  # type: ignore

        api_mock._get.assert_called_once_with(params={"startIndex": 0})

        response_mock.raise_for_status.side_effect = Exception("Server Error")

        api_mock.reset_mock()

        # pin the message so an unrelated exception can't satisfy the check
        with self.assertRaisesRegex(Exception, "Server Error"):
            json = await nvd_results.json()

        # the failed fetch targeted the second page ...
        api_mock._get.assert_called_once_with(
            params={
                "startIndex": 3,
                "resultsPerPage": 3,
            }
        )

        response_mock.reset_mock(return_value=True, side_effect=True)
        api_mock.reset_mock()

        response_mock.json.side_effect = [
            {
                "values": [4, 5, 6],
                "total_results": 6,
                "results_per_page": 3,
            },
        ]

        json = await nvd_results.json()
        self.assertEqual(json["values"], [4, 5, 6])  # type: ignore

        # ... and the retry requests exactly the same page again
        api_mock._get.assert_called_once_with(
            params={
                "startIndex": 3,
                "resultsPerPage": 3,
            }
        )

    async def test_request_results_limit(self):
        """``request_results`` caps the total number of fetched results.

        With a limit of 5 and a first page of 4 results, the follow-up
        request must only ask for the single remaining result.
        """
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3, 4],
                "total_results": 5,
                "results_per_page": 4,
            },
            {
                "values": [5],
                "total_results": 5,
                "results_per_page": 1,
            },
        ]
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        nvd_results: NVDResults[Result] = NVDResults(
            api_mock,
            {},
            result_func,
            request_results=5,
        )

        json: dict[str, Any] = await nvd_results.json()  # type: ignore
        self.assertEqual(json["values"], [1, 2, 3, 4])
        self.assertEqual(json["total_results"], 5)
        self.assertEqual(json["results_per_page"], 4)

        api_mock._get.assert_called_once_with(params={"startIndex": 0})
        api_mock.reset_mock()

        json: dict[str, Any] = await nvd_results.json()  # type: ignore
        self.assertEqual(json["values"], [5])
        self.assertEqual(json["total_results"], 5)
        self.assertEqual(json["results_per_page"], 1)

        # only one result left within the limit -> resultsPerPage=1
        api_mock._get.assert_called_once_with(
            params={"startIndex": 4, "resultsPerPage": 1}
        )

    async def test_repr(self):
        response_mock = MagicMock(spec=Response)
        response_mock.json.side_effect = [
            {
                "values": [1, 2, 3, 4],
                "total_results": 5,
                "results_per_page": 4,
            },
            {
                "values": [5],
                "total_results": 5,
                "results_per_page": 1,
            },
        ]
        response_mock.url = "https://some.url&startIndex=0"
        api_mock = AsyncMock(spec=NVDApi)
        api_mock._get.return_value = response_mock

        nvd_results: NVDResults[Result] = NVDResults(
            api_mock,
            {},
            result_func,
        )

        await nvd_results

        self.assertEqual(
            repr(nvd_results),
            '",
        )
pontos-25.3.2/tests/release/000077500000000000000000000000001476255566300157325ustar00rootroot00000000000000pontos-25.3.2/tests/release/__init__.py000066400000000000000000000001411476255566300200370ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/release/test_create.py000066400000000000000000002634011476255566300206140ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=too-many-lines, line-too-long, invalid-name

import unittest
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Iterable, Iterator, Optional, Union
from unittest.mock import AsyncMock, MagicMock, call, patch

from httpx import HTTPStatusError, Request, Response

from pontos.git import ConfigScope, Git, ResetMode, StatusEntry
from pontos.github.actions.errors import GitHubActionsError
from pontos.release.create import (
    CreateReleaseCommand,
    CreateReleaseReturnValue,
    ReleaseInformation,
    create_release,
)
from pontos.release.helper import ReleaseType
from pontos.release.main import parse_args
from pontos.terminal import Terminal
from pontos.testing import temp_directory, temp_git_repository
from pontos.version import VersionError, VersionUpdate
from pontos.version.commands import GoVersionCommand
from pontos.version.schemes._pep440 import PEP440Version, PEP440VersioningScheme

# Shared fixtures used by the release-creation tests below.
TOKEN = "foo"  # GitHub API token handed to the release commands
REPOSITORY = "greenbone/foo"  # default <owner>/<repo> slug
GIT_SIGNING_KEY = "1234"  # GPG key id used for signed commits and tags
CHANGELOG = "A Changelog"  # changelog text passed to the release


def mock_terminal() -> MagicMock:
    """Return a mock standing in for a :class:`Terminal` instance."""
    terminal = MagicMock(spec=Terminal)
    return terminal


def str_or_list(values: Union[str, Iterable[str]]) -> Iterable[str]:
    """Wrap a non-empty string into a list; pass anything else through."""
    if isinstance(values, str) and values:
        return [values]
    return values


@contextmanager
def setup_go_project(
    *, current_version: str, tags: Union[str, Iterable[str], None] = None
) -> Iterator[Path]:
    """Yield a temporary git repository containing a go project.

    The project version is set to ``current_version`` and committed.
    For every entry in *tags* a ``v<tag>`` git tag is created.
    """
    with temp_git_repository() as repo_path:
        repo = Git(repo_path)

        # disable GPG signing so the test repo needs no key setup
        for key, value in (
            ("commit.gpgSign", "false"),
            ("tag.gpgSign", "false"),
            ("tag.sort", "refname"),
        ):
            repo.config(key, value, scope=ConfigScope.LOCAL)

        repo.add_remote("origin", "http://foo/bar.git")

        go_cmd = GoVersionCommand(PEP440VersioningScheme)
        go_cmd.project_file_path.touch()
        update = go_cmd.update_version(
            new_version=PEP440Version(current_version)
        )

        repo.add(update.changed_files)
        repo.add(go_cmd.project_file_path)
        repo.commit("Create initial release")

        if tags:
            for tag in str_or_list(tags):
                repo.tag(f"v{tag}")

        yield repo_path


class ReleaseInformationTestCase(unittest.TestCase):
    """Tests for the ReleaseInformation data holder."""

    def test_release_info(self):
        """All constructor arguments are exposed as attributes."""
        release_info = ReleaseInformation(
            last_release_version=PEP440Version.from_string("1.2.3"),
            release_version=PEP440Version.from_string("2.0.0"),
            git_release_tag="v2.0.0",
            next_version=PEP440Version.from_string("2.0.1.dev1"),
        )

        self.assertEqual(
            release_info.last_release_version,
            PEP440Version.from_string("1.2.3"),
        )
        self.assertEqual(
            release_info.release_version, PEP440Version.from_string("2.0.0")
        )
        self.assertEqual(release_info.git_release_tag, "v2.0.0")
        self.assertEqual(
            release_info.next_version, PEP440Version.from_string("2.0.1.dev1")
        )

    @patch.dict("os.environ", {}, clear=True)
    def test_no_github_output(self):
        """Writing the output without GITHUB_OUTPUT set must fail."""
        release_info = ReleaseInformation(
            last_release_version=PEP440Version.from_string("1.2.3"),
            release_version=PEP440Version.from_string("2.0.0"),
            git_release_tag="v2.0.0",
            next_version=PEP440Version.from_string("2.0.1.dev1"),
        )

        with self.assertRaisesRegex(
            GitHubActionsError,
            "GITHUB_OUTPUT environment variable not set. Can't write "
            "action output.",
        ):
            release_info.write_github_output()

    def test_github_output(self):
        """write_github_output writes key=value lines to $GITHUB_OUTPUT."""
        expected = """last-release-version=1.2.3
release-version=2.0.0
git-release-tag=v2.0.0
next-version=2.0.1.dev1
"""
        with temp_directory() as temp_dir:
            out_file = temp_dir / "out.txt"
            with patch.dict(
                "os.environ", {"GITHUB_OUTPUT": str(out_file.absolute())}
            ):
                release_info = ReleaseInformation(
                    last_release_version=PEP440Version.from_string("1.2.3"),
                    release_version=PEP440Version.from_string("2.0.0"),
                    git_release_tag="v2.0.0",
                    next_version=PEP440Version.from_string("2.0.1.dev1"),
                )

                release_info.write_github_output()

            self.assertTrue(out_file.exists())
            actual = out_file.read_text(encoding="utf8")
            self.assertEqual(actual, expected)


class CreateReleaseCommandTestCase(unittest.TestCase):
    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_version(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """Release an explicitly requested version (ReleaseType.VERSION).

        The version command is updated twice: to the release version and
        then to the next development version.
        """
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2")
        next_version = PEP440Version("1.0.0.dev1")
        git_mock = MagicMock(spec=Git)
        command_mock = MagicMock(spec=GoVersionCommand)
        # first update: current -> release, second: release -> next dev
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                last_release_version=current_version,
                next_version=PEP440Version("1.0.0.dev1"),
                changelog=CHANGELOG,
            )

        # one push for the release, one for the dev-version adjustment
        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ]
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.0.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_with_repository(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """A custom ``repository`` slug ("foo/bar") can be passed to run."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2")
        next_version = PEP440Version("1.0.0.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # first update: current -> release, second: release -> next dev
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository="foo/bar",
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                last_release_version=current_version,
                next_version=next_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ]
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.0.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch("pontos.release.create.get_last_release_version", autospec=True)
    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_initial_release_version(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
        get_last_release_version_mock: MagicMock,
    ):
        """Release when no previous release exists.

        ``get_last_release_version`` returns None and ``run`` is called
        without a ``last_release_version``.
        """
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("1.0.0")
        next_version = PEP440Version("1.0.1.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # first update: current -> release, second: release -> next dev
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]
        get_last_release_version_mock.return_value = None

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ]
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 1.0.0",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.1.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v1.0.0", gpg_key_id="1234", message="Automatic release to 1.0.0"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_patch(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """ReleaseType.PATCH: 0.0.1 is released as 0.0.2, next 0.0.3.dev1."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2")
        next_version = PEP440Version("0.0.3.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # first update: current -> release, second: release -> next dev
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.PATCH,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 0.0.3.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_calendar(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """ReleaseType.CALENDAR builds ``<YY>.<M>.0`` from today's date."""
        today = datetime.today()
        current_version = PEP440Version("0.0.1")
        # two-digit year and current month, e.g. 24.5.0 / 24.5.1.dev1
        release_version = PEP440Version(f"{today.year % 100}.{today.month}.0")
        next_version = PEP440Version(f"{today.year % 100}.{today.month}.1.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # first update: current -> release, second: release -> next dev
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.CALENDAR,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ]
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    f"Automatic release to {release_version}",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    f"* Update to version {next_version}\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            f"v{release_version}",
            gpg_key_id="1234",
            message=f"Automatic release to {release_version}",
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_minor(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """ReleaseType.MINOR: 0.0.1 is released as 0.1.0, next 0.1.1.dev1."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.1.0")
        next_version = PEP440Version("0.1.1.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # first update: current -> release, second: release -> next dev
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MINOR,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.1.0",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 0.1.1.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.1.0", gpg_key_id="1234", message="Automatic release to 0.1.0"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_major(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """ReleaseType.MAJOR: 0.0.1 is released as 1.0.0, next 1.0.1.dev1."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("1.0.0")
        next_version = PEP440Version("1.0.1.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # first update: current -> release, second: release -> next dev
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MAJOR,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 1.0.0",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.1.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v1.0.0", gpg_key_id="1234", message="Automatic release to 1.0.0"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_alpha(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """An ``ALPHA`` release bumps 0.0.1 to 0.0.2a1.

        The command must commit and tag the release version, create the
        GitHub release and afterwards commit the follow-up development
        version 0.0.2a2.dev1, pushing twice.
        """
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2a1")
        next_version = PEP440Version("0.0.2a2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # First update_version call switches to the release version, the
        # second one to the post-release development version.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.ALPHA,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2a1",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 0.0.2a2.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2a1",
            gpg_key_id="1234",
            message="Automatic release to 0.0.2a1",
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_beta(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """A ``BETA`` release bumps 0.0.1 to 0.0.2b1.

        The command must commit and tag the release version, create the
        GitHub release and afterwards commit the follow-up development
        version 0.0.2b2.dev1, pushing twice.
        """
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2b1")
        next_version = PEP440Version("0.0.2b2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # First update_version call switches to the release version, the
        # second one to the post-release development version.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.BETA,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2b1",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 0.0.2b2.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2b1",
            gpg_key_id="1234",
            message="Automatic release to 0.0.2b1",
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_release_candidate(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """A ``RELEASE_CANDIDATE`` release bumps 0.0.1 to 0.0.2rc1.

        The command must commit and tag the release version, create the
        GitHub release and afterwards commit the follow-up development
        version 0.0.2rc2.dev1, pushing twice.
        """
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2rc1")
        next_version = PEP440Version("0.0.2rc2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # First update_version call switches to the release version, the
        # second one to the post-release development version.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.RELEASE_CANDIDATE,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2rc1",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 0.0.2rc2.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2rc1",
            gpg_key_id="1234",
            message="Automatic release to 0.0.2rc1",
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    def test_invalid_repository(
        self,
    ):
        """Repository names not matching the ``owner/name`` form must abort
        with ``INVALID_REPOSITORY``."""
        mock_git = MagicMock(spec=Git)
        command = CreateReleaseCommand(
            git=mock_git,
        )

        # Too many path segments and no separator at all are both invalid.
        for bad_repository in ("foo/bar/baz", "foo_bar_baz"):
            result = command.run(
                token=TOKEN,
                repository=bad_repository,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MAJOR,
            )

            self.assertEqual(
                result, CreateReleaseReturnValue.INVALID_REPOSITORY
            )

    def test_no_project_settings(
        self,
    ):
        """Running in a repository without project settings must fail with
        ``PROJECT_SETTINGS_NOT_FOUND``."""
        mock_git = MagicMock(spec=Git)
        version = PEP440Version("0.0.1")

        with temp_git_repository():
            result = CreateReleaseCommand(git=mock_git).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=version,
                last_release_version=version,
            )

        self.assertEqual(
            result, CreateReleaseReturnValue.PROJECT_SETTINGS_NOT_FOUND
        )

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project", autospec=True)
    def test_no_update_project(
        self,
        project_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """With ``update_project=False`` no version files are touched.

        The tag and GitHub release must still be created, but ``Project``
        must never be instantiated and nothing is added or committed.
        """
        release_version = PEP440Version("0.0.2")
        git_mock = MagicMock(spec=Git)
        # No modified files - nothing to commit.
        git_mock.status.return_value = []

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=PEP440Version("0.0.2"),
                last_release_version=PEP440Version("0.0.1"),
                next_version=PEP440Version("1.0.0.dev1"),
                update_project=False,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_not_called()
        git_mock.commit.assert_not_called()
        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

        project_mock.assert_not_called()

    @patch("pontos.release.create.get_last_release_version", autospec=True)
    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_gather_last_release(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
        get_last_release_version_mock: MagicMock,
    ):
        """Without ``last_release_version`` the command determines it itself.

        ``get_last_release_version`` must be consulted (ignoring
        pre-releases) and the release must then proceed exactly as if the
        version had been passed in.
        """
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("1.0.0")
        next_version = PEP440Version("1.0.1.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        # First update_version call switches to the release version, the
        # second one to the post-release development version.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]
        get_last_release_version_mock.return_value = current_version

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MAJOR,
                next_version=PEP440VersioningScheme.parse_version("1.0.1.dev1"),
                changelog=CHANGELOG,
            )

        get_last_release_version_mock.assert_called_once_with(
            git=git_mock,
            parse_version=PEP440VersioningScheme.parse_version,
            git_tag_prefix="v",
            tag_name=None,
            ignore_pre_releases=True,
        )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 1.0.0",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.1.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v1.0.0", gpg_key_id="1234", message="Automatic release to 1.0.0"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    def test_last_release_version_error(self):
        """When no previous release can be found in the repository the
        command must return ``NO_LAST_RELEASE_VERSION``."""
        mock_git = MagicMock(spec=Git)

        with temp_git_repository():
            result = CreateReleaseCommand(git=mock_git).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MAJOR,
            )

        self.assertEqual(
            result, CreateReleaseReturnValue.NO_LAST_RELEASE_VERSION
        )

    @patch("pontos.release.create.get_last_release_version", autospec=True)
    def test_no_last_release_version(
        self,
        get_last_release_version_mock: MagicMock,
    ):
        """If ``get_last_release_version`` yields ``None`` the command must
        return ``NO_LAST_RELEASE_VERSION``."""
        get_last_release_version_mock.return_value = None
        mock_git = MagicMock(spec=Git)

        with temp_git_repository():
            result = CreateReleaseCommand(git=mock_git).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MINOR,
            )

        self.assertEqual(
            result, CreateReleaseReturnValue.NO_LAST_RELEASE_VERSION
        )

    @patch(
        "pontos.release.create.get_next_release_version",
        autospec=True,
    )
    def test_no_release_error(
        self,
        get_next_release_version_mock: MagicMock,
    ):
        """A ``VersionError`` while calculating the next release version
        must result in ``NO_RELEASE_VERSION``."""
        get_next_release_version_mock.side_effect = VersionError("An error")
        mock_git = MagicMock(spec=Git)

        with temp_git_repository():
            result = CreateReleaseCommand(git=mock_git).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.PATCH,
                last_release_version=PEP440Version("0.0.1"),
            )

        self.assertEqual(result, CreateReleaseReturnValue.NO_RELEASE_VERSION)

    def test_has_tag(
        self,
    ):
        """Releasing a version whose git tag already exists must fail with
        ``ALREADY_TAKEN``."""
        mock_git = MagicMock(spec=Git)
        mock_git.list_tags.return_value = ["v1.0.0", "v1.0.1"]

        with temp_git_repository():
            result = CreateReleaseCommand(git=mock_git).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=PEP440Version("1.0.1"),
            )

            self.assertEqual(result, CreateReleaseReturnValue.ALREADY_TAKEN)

        # The tag list is inspected twice while checking for collisions.
        self.assertEqual(mock_git.list_tags.call_count, 2)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_update_version_error(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """A failing version update must abort before any git interaction.

        When updating to the release version raises a ``VersionError``, no
        add, commit, tag, push or GitHub release must happen and the command
        must return ``UPDATE_VERSION_ERROR``.
        """
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        command_mock.update_version.side_effect = VersionError("An error")
        git_mock = MagicMock(spec=Git)

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=PEP440Version("0.0.1"),
                next_version=PEP440Version("0.0.2.dev1"),
                last_release_version=PEP440Version("0.0.1rc1"),
            )

        # NOTE(review): git_mock is the Git *instance* handed to
        # CreateReleaseCommand, so assertions must target its attributes
        # directly. The previous ``git_mock.return_value.push`` checks
        # asserted on an unused child mock and therefore always passed.
        git_mock.push.assert_not_called()

        command_mock.update_version.assert_called_once_with(
            PEP440Version("0.0.1"), force=False
        )

        create_release_mock.assert_not_awaited()

        git_mock.add.assert_not_called()
        git_mock.commit.assert_not_called()
        git_mock.tag.assert_not_called()

        self.assertEqual(
            released, CreateReleaseReturnValue.UPDATE_VERSION_ERROR
        )

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_github_create_release_failure(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """A failing GitHub release must roll back the local git changes.

        When creating the GitHub release raises an ``HTTPStatusError``, the
        command must delete the pushed tag, hard-reset the release commit,
        force-push the rollback and return ``CREATE_RELEASE_ERROR``.
        """
        current_version = PEP440Version("0.0.0")
        release_version = PEP440Version("0.0.1")
        next_version = PEP440Version("0.0.2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        # Simulate the GitHub API rejecting the release creation.
        create_release_mock.side_effect = HTTPStatusError(
            "Error during a request",
            request=MagicMock(spec=Request),
            response=MagicMock(spec=Response),
        )
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]
        gather_commands_mock.return_value = [command_mock]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        self.assertEqual(
            released, CreateReleaseReturnValue.CREATE_RELEASE_ERROR
        )

        # Rollback: delete the remote tag and force-push the reset state.
        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call("v0.0.1", delete=True, remote=None),
                call(force=True, remote=None),
            ]
        )
        git_mock.reset.assert_called_once_with("HEAD^", mode=ResetMode.HARD)
        git_mock.delete_tag.assert_called_once_with("v0.0.1")
        git_mock.add.assert_called_once_with(Path("MyProject.conf"))
        git_mock.commit.assert_called_once_with(
            "Automatic release to 0.0.1", verify=False, gpg_signing_key="1234"
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.1", gpg_key_id="1234", message="Automatic release to 0.0.1"
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_update_version_after_release_error(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """A failing post-release version bump is reported but the release
        itself stays intact.

        The release commit, tag, push and GitHub release succeed; only the
        second ``update_version`` call (to the next dev version) raises, so
        the command returns ``UPDATE_VERSION_AFTER_RELEASE_ERROR``.
        """
        current_version = PEP440Version("0.0.0")
        release_version = PEP440Version("0.0.1")
        next_version = PEP440Version("0.0.2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # First call (release version) succeeds, second call (next dev
        # version) raises.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionError("An error"),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        # Only the release itself was pushed; the follow-up commit never
        # happened.
        git_mock.push.assert_called_once_with(follow_tags=True, remote=None)

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ]
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_called_once_with(Path("MyProject.conf"))
        git_mock.commit.assert_called_once_with(
            "Automatic release to 0.0.1", verify=False, gpg_signing_key="1234"
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.1", gpg_key_id="1234", message="Automatic release to 0.0.1"
        )

        self.assertEqual(
            released,
            CreateReleaseReturnValue.UPDATE_VERSION_AFTER_RELEASE_ERROR,
        )

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_to_specific_git_remote(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """``git_remote_name`` is forwarded to every git push."""
        current_version = PEP440Version("0.0.0")
        release_version = PEP440Version("0.0.1")
        next_version = PEP440Version("0.0.2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # First update_version call switches to the release version, the
        # second one to the post-release development version.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
                git_remote_name="upstream",
                changelog=CHANGELOG,
            )

        # Both pushes must target the configured remote.
        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote="upstream"),
                call(follow_tags=True, remote="upstream"),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        git_mock.add.assert_called_with(Path("MyProject.conf"))
        git_mock.commit.assert_called_with(
            "Automatic adjustments after release [skip ci]\n\n"
            "* Update to version 0.0.2.dev1\n",
            verify=False,
            gpg_signing_key="1234",
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.1", gpg_key_id="1234", message="Automatic release to 0.0.1"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_without_git_prefix(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """An empty ``git_tag_prefix`` yields a tag without the leading
        ``v`` (``0.0.1`` instead of ``v0.0.1``)."""
        current_version = PEP440Version("0.0.0")
        release_version = PEP440Version("0.0.1")
        next_version = PEP440Version("0.0.2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # First update_version call switches to the release version, the
        # second one to the post-release development version.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
                git_tag_prefix="",
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        git_mock.add.assert_called_with(Path("MyProject.conf"))
        git_mock.commit.assert_called_with(
            "Automatic adjustments after release [skip ci]\n\n"
            "* Update to version 0.0.2.dev1\n",
            verify=False,
            gpg_signing_key="1234",
        )
        # Tag name has no "v" prefix.
        git_mock.tag.assert_called_once_with(
            "0.0.1", gpg_key_id="1234", message="Automatic release to 0.0.1"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch("pontos.release.create.GitHubAsyncRESTApi", autospec=True)
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_github_api(
        self,
        gather_commands_mock: MagicMock,
        github_api_mock: AsyncMock,
    ):
        """The GitHub REST API receives the expected release payload.

        Unlike the other tests this one mocks at the ``GitHubAsyncRESTApi``
        level (not ``_create_release``) to verify the repository, tag name,
        release name, body and prerelease flag passed to
        ``releases.create``.
        """
        current_version = PEP440Version("0.0.0")
        release_version = PEP440Version("0.0.1")
        next_version = PEP440Version("0.0.2.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # First update_version call switches to the release version, the
        # second one to the post-release development version.
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        create_api_mock = AsyncMock()
        github_api_mock.return_value.releases.create = create_api_mock
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository="bar/foo",
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
                git_remote_name="upstream",
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote="upstream"),
                call(follow_tags=True, remote="upstream"),
            ],
        )

        create_api_mock.assert_awaited_once_with(
            "bar/foo",
            "v0.0.1",
            name="foo 0.0.1",
            body="A Changelog",
            prerelease=False,
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        git_mock.add.assert_called_with(Path("MyProject.conf"))
        git_mock.commit.assert_called_with(
            "Automatic adjustments after release [skip ci]\n\n"
            "* Update to version 0.0.2.dev1\n",
            verify=False,
            gpg_signing_key="1234",
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.1", gpg_key_id="1234", message="Automatic release to 0.0.1"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch("pontos.release.create.GitHubAsyncRESTApi", autospec=True)
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_github_api_pre_release(
        self,
        gather_commands_mock: MagicMock,
        github_api_mock: AsyncMock,
    ):
        """Release an alpha version via the (mocked) GitHub REST API and
        verify that the GitHub release is created with prerelease=True."""
        current_version = PEP440Version("0.0.0")
        release_version = PEP440Version("0.0.1a1")
        next_version = PEP440Version("0.0.1a1+dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # first call bumps to the alpha release, second call to the
        # follow-up dev version
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        create_api_mock = AsyncMock()
        github_api_mock.return_value.releases.create = create_api_mock
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository="bar/foo",
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
                git_remote_name="upstream",
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote="upstream"),
                call(follow_tags=True, remote="upstream"),
            ],
        )

        # an alpha version must be published as a GitHub pre-release
        create_api_mock.assert_awaited_once_with(
            "bar/foo",
            "v0.0.1a1",
            name="foo 0.0.1a1",
            body="A Changelog",
            prerelease=True,
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        git_mock.add.assert_called_with(Path("MyProject.conf"))
        git_mock.commit.assert_called_with(
            "Automatic adjustments after release [skip ci]\n\n"
            "* Update to version 0.0.1a1+dev1\n",
            verify=False,
            gpg_signing_key="1234",
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.1a1",
            gpg_key_id="1234",
            message="Automatic release to 0.0.1a1",
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    def test_release_with_go_project(
        self,
        create_release_mock: AsyncMock,
    ):
        """Patch-release a temporary Go project and verify push, commit, tag
        and the arguments forwarded to ``_create_release``."""
        release_version = PEP440Version("0.0.2")
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [
            StatusEntry(f"M  {GoVersionCommand.version_file_path}")
        ]
        git_mock.list_tags.return_value = ["v0.0.1"]

        with setup_go_project(current_version="0.0.1", tags=["0.0.1"]):
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository="foo/bar",
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.PATCH,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_called_with(follow_tags=True, remote=None)
        git_mock.add.assert_called_with(GoVersionCommand.version_file_path)
        git_mock.commit.assert_called_with(
            "Automatic adjustments after release [skip ci]\n\n"
            "* Update to version 0.0.3.dev1\n",
            verify=False,
            gpg_signing_key="1234",
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        # single SUCCESS assertion (the original asserted it twice)
        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_with_gathered_changelog(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
        cc_git_mock: MagicMock,
    ):
        """When no changelog is passed, the changelog is gathered from the
        conventional commits between the last release tag and HEAD."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2")
        next_version = PEP440Version("1.0.0.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # first call bumps to the release version, second call to the
        # follow-up dev version (which touches two files)
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("project.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("project.conf"), Path("version.lang")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  project.conf")]
        # fake commit history; non-conventional entries ("Add bar baz",
        # "Adding bar baz") and types not rendered below (Test, Refactor,
        # Doc) must not show up in the expected changelog
        cc_git_mock.return_value.log.return_value = [
            "1234567 Add: foo bar",
            "8abcdef Add: bar baz",
            "8abcd3f Add bar baz",
            "8abcd3d Adding bar baz",
            "1337abc Change: bar to baz",
            "42a42a4 Remove: foo bar again",
            "fedcba8 Test: bar baz testing",
            "dead901 Refactor: bar baz ref",
            "fedcba8 Fix: bar baz fixing",
            "d0c4d0c Doc: bar baz documenting",
        ]
        today = datetime.today().strftime("%Y-%m-%d")
        expected_changelog = f"""## [0.0.2] - {today}

## Added
* foo bar [1234567](https://github.com/greenbone/foo/commit/1234567)
* bar baz [8abcdef](https://github.com/greenbone/foo/commit/8abcdef)

## Removed
* foo bar again [42a42a4](https://github.com/greenbone/foo/commit/42a42a4)

## Changed
* bar to baz [1337abc](https://github.com/greenbone/foo/commit/1337abc)

## Bug Fixes
* bar baz fixing [fedcba8](https://github.com/greenbone/foo/commit/fedcba8)

[0.0.2]: https://github.com/greenbone/foo/compare/v0.0.1...v0.0.2"""

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
            )

        git_mock.list_tags.assert_called_once_with()

        # the commit log is read from the last release tag up to HEAD
        cc_git_mock.return_value.log.assert_called_once_with(
            "v0.0.1..HEAD", oneline=True
        )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ]
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ]
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", expected_changelog, False),
        )

        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )
        git_mock.add.assert_has_calls(
            [
                call(Path("project.conf")),
                call(Path("project.conf")),
                call(Path("version.lang")),
            ]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.0.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch("pontos.changelog.conventional_commits.Git", autospec=True)
    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_with_changelog(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
        cc_git_mock: MagicMock,
    ):
        """When a changelog is passed explicitly, the conventional-commit
        log must not be consulted and the given text is used verbatim."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2")
        next_version = PEP440Version("1.0.0.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # first call bumps to the release version, second call to the
        # follow-up dev version (which touches two files)
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("project.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("project.conf"), Path("version.lang")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  project.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=release_version,
                next_version=next_version,
                last_release_version=current_version,
                changelog="A changelog",
            )

        git_mock.list_tags.assert_called_once_with()

        # explicit changelog given, so no commit history is gathered
        cc_git_mock.return_value.log.assert_not_called()

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ]
        )

        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ]
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A changelog", False),
        )

        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )
        git_mock.add.assert_has_calls(
            [
                call(Path("project.conf")),
                call(Path("project.conf")),
                call(Path("version.lang")),
            ]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.0.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_local(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """With ``local=True`` nothing is pushed and no GitHub release is
        created, while the version bump, commit and tag still happen."""
        old_version = PEP440Version("0.0.0")
        new_version = PEP440Version("0.0.1")
        dev_version = PEP440Version("0.0.2.dev1")

        version_cmd = MagicMock(spec=GoVersionCommand)
        version_cmd.update_version.side_effect = [
            VersionUpdate(
                previous=old_version,
                new=new_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=new_version,
                new=dev_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        gather_commands_mock.return_value = [version_cmd]

        git = MagicMock(spec=Git)
        git.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            result = CreateReleaseCommand(git=git).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=new_version,
                next_version=dev_version,
                last_release_version=old_version,
                local=True,
                changelog=CHANGELOG,
            )

        # local mode: neither a push nor a GitHub release may happen
        git.push.assert_not_called()
        create_release_mock.assert_not_awaited()

        version_cmd.update_version.assert_has_calls(
            [
                call(new_version, force=False),
                call(dev_version, force=False),
            ]
        )

        git.add.assert_called_with(Path("MyProject.conf"))
        git.commit.assert_called_with(
            "Automatic adjustments after release [skip ci]\n\n"
            "* Update to version 0.0.2.dev1\n",
            verify=False,
            gpg_signing_key="1234",
        )
        git.tag.assert_called_once_with(
            "v0.0.1", gpg_key_id="1234", message="Automatic release to 0.0.1"
        )

        self.assertEqual(result, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_enforce_github_release(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """``github_pre_release=True`` must mark the GitHub release as a
        pre-release even for a regular patch release version."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2")
        next_version = PEP440Version("1.0.0.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # first call bumps to the release version, second call to the
        # follow-up dev version
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.PATCH,
                next_version=next_version,
                last_release_version=current_version,
                github_pre_release=True,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
                call(next_version, force=False),
            ],
        )

        # last element True: the release is forced to be a pre-release
        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", True),
        )

        git_mock.add.assert_has_calls(
            [call(Path("MyProject.conf")), call(Path("MyProject.conf"))]
        )
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2",
                    verify=False,
                    gpg_signing_key="1234",
                ),
                call(
                    "Automatic adjustments after release [skip ci]\n\n"
                    "* Update to version 1.0.0.dev1\n",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_release",
        autospec=True,
    )
    @patch("pontos.release.create.Project._gather_commands", autospec=True)
    def test_release_no_next_release(
        self,
        gather_commands_mock: MagicMock,
        create_release_mock: AsyncMock,
    ):
        """``next_version=False`` skips the post-release dev-version bump:
        only a single version update, commit and push are expected."""
        current_version = PEP440Version("0.0.1")
        release_version = PEP440Version("0.0.2")
        next_version = PEP440Version("1.0.0.dev1")
        command_mock = MagicMock(spec=GoVersionCommand)
        gather_commands_mock.return_value = [command_mock]
        # second side effect is prepared but must never be consumed, since
        # no next-version bump is requested
        command_mock.update_version.side_effect = [
            VersionUpdate(
                previous=current_version,
                new=release_version,
                changed_files=[Path("MyProject.conf")],
            ),
            VersionUpdate(
                previous=release_version,
                new=next_version,
                changed_files=[Path("MyProject.conf")],
            ),
        ]
        git_mock = MagicMock(spec=Git)
        git_mock.status.return_value = [StatusEntry("M  MyProject.conf")]

        with temp_git_repository():
            released = CreateReleaseCommand(
                git=git_mock,
            ).run(
                token=TOKEN,
                repository=REPOSITORY,
                git_signing_key=GIT_SIGNING_KEY,
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.PATCH,
                next_version=False,
                last_release_version=current_version,
                changelog=CHANGELOG,
            )

        git_mock.push.assert_has_calls(
            [
                call(follow_tags=True, remote=None),
            ],
        )
        command_mock.update_version.assert_has_calls(
            [
                call(release_version, force=False),
            ],
        )

        self.assertEqual(
            create_release_mock.await_args.args[1:],  # type: ignore[union-attr]
            (release_version, "foo", "A Changelog", False),
        )

        git_mock.add.assert_has_calls([call(Path("MyProject.conf"))])
        git_mock.commit.assert_has_calls(
            [
                call(
                    "Automatic release to 0.0.2",
                    verify=False,
                    gpg_signing_key="1234",
                ),
            ]
        )
        git_mock.tag.assert_called_once_with(
            "v0.0.2", gpg_key_id="1234", message="Automatic release to 0.0.2"
        )

        self.assertEqual(released, CreateReleaseReturnValue.SUCCESS)


class CreateReleaseTestCase(unittest.TestCase):
    """Argument-level checks for the ``create_release`` entry point."""

    def test_no_token(self):
        """Without a token the command must abort with TOKEN_MISSING."""
        argv = [
            "release",
            "--repository",
            "greenbone/foo",
            "--release-version",
            "0.0.1",
        ]
        _, _, args = parse_args(argv)

        result = create_release(
            terminal=mock_terminal(),
            error_terminal=mock_terminal(),
            args=args,
            token=None,
        )

        self.assertEqual(result, CreateReleaseReturnValue.TOKEN_MISSING)


@dataclass
class Release:
    """One table-driven test case for the release scenarios below."""

    release_type: str  # value passed to the --release-type CLI argument
    current_version: str  # version the temporary project starts with
    expected_release_version: str  # version the release is expected to get
    tags: Union[str, list[str]]  # git tag(s) present in the test repository
    expected_last_release_version: Optional[str] = None  # previous release
    release_series: Optional[str] = None  # --release-series CLI argument


@patch.dict(
    "os.environ",
    {"GITHUB_TOKEN": "foo", "GITHUB_USER": "user", "GPG_SIGNING_KEY": ""},
)
class ReleaseGoProjectTestCase(unittest.TestCase):
    """Table-driven release tests for a temporary Go project, run through
    the ``create_release`` CLI entry point with git pushes and the GitHub
    API mocked out."""

    @patch("pontos.release.create.Git.push", autospec=True)
    @patch(
        "pontos.release.create.GitHubAsyncRESTApi",
        autospec=True,
    )
    def test_release(
        self, github_api_mock: AsyncMock, _git_push_mock: MagicMock
    ):
        """Each release type must compute the expected release version from
        the current version and existing tags."""
        create_api_mock = AsyncMock()
        github_api_mock.return_value.releases.create = create_api_mock

        # scenarios: one per release type, plus pre-release continuations
        releases = [
            Release(
                release_type="major",
                current_version="1.0.0",
                expected_release_version="2.0.0",
                tags="1.0.0",
            ),
            Release(
                release_type="minor",
                current_version="1.0.0",
                expected_release_version="1.1.0",
                tags="1.0.0",
            ),
            Release(
                release_type="patch",
                current_version="1.0.0",
                expected_release_version="1.0.1",
                tags="1.0.0",
            ),
            Release(
                release_type="patch",
                current_version="1.0.5",
                expected_release_version="1.0.6",
                tags="1.0.5",
            ),
            Release(
                release_type="alpha",
                current_version="1.0.0",
                expected_release_version="1.0.1a1",
                tags="1.0.0",
            ),
            Release(
                release_type="alpha",
                current_version="1.0.0a3",
                expected_release_version="1.0.0a4",
                tags="1.0.0a3",
            ),
            Release(
                release_type="beta",
                current_version="1.0.0",
                expected_release_version="1.0.1b1",
                tags="1.0.0",
            ),
            Release(
                release_type="beta",
                current_version="1.0.0b2",
                expected_release_version="1.0.0b3",
                tags="1.0.0b2",
            ),
            Release(
                release_type="release-candidate",
                current_version="1.0.0",
                expected_release_version="1.0.1rc1",
                tags="1.0.0",
            ),
            Release(
                release_type="release-candidate",
                current_version="1.0.0rc1",
                expected_release_version="1.0.0rc2",
                tags="1.0.0rc1",
            ),
        ]

        for r in releases:
            with setup_go_project(
                current_version=r.current_version, tags=r.tags
            ):
                _, token, args = parse_args(
                    [
                        "release",
                        "--repository",
                        "foo/bar",
                        "--release-type",
                        r.release_type,
                    ]
                )
                released = create_release(
                    terminal=mock_terminal(),
                    error_terminal=mock_terminal(),
                    args=args,
                    token=token,  # type: ignore[arg-type]
                )

            self.assertEqual(
                released,
                CreateReleaseReturnValue.SUCCESS,
                f"v{r.expected_release_version}",
            )
            # second positional API argument is the git tag of the release
            self.assertEqual(
                create_api_mock.call_args.args[1],
                f"v{r.expected_release_version}",
            )

            create_api_mock.reset_mock()

    @patch(
        "pontos.release.create.CreateReleaseCommand._create_changelog",
        autospec=True,
    )
    @patch("pontos.release.create.Git.push", autospec=True)
    @patch(
        "pontos.release.create.GitHubAsyncRESTApi",
        autospec=True,
    )
    def test_release_series(
        self,
        github_api_mock: AsyncMock,
        _git_push_mock: MagicMock,
        create_changelog_mock: MagicMock,
    ):
        """With ``--release-series`` the release and last-release versions
        must be resolved within the requested series, ignoring tags of
        other series."""
        create_api_mock = AsyncMock()
        github_api_mock.return_value.releases.create = create_api_mock

        create_changelog_mock.return_value = "A Changelog"

        releases = [
            Release(
                release_type="major",
                current_version="1.0.0",
                expected_release_version="2.0.0",
                expected_last_release_version="1.0.0",
                tags=["1.0.0", "3.0.0"],
                release_series="1",
            ),
            Release(
                release_type="major",
                current_version="1.0.0rc1",
                expected_release_version="1.0.0",
                expected_last_release_version="0.9.0",
                tags=["0.9.0", "0.8.1", "0.5.0"],
            ),
            Release(
                release_type="minor",
                current_version="1.0.0",
                expected_release_version="2.1.0",
                expected_last_release_version="2.0.0",
                tags=["1.0.0", "2.0.0"],
            ),
            Release(
                release_type="minor",
                current_version="1.0.0",
                expected_release_version="1.1.0",
                expected_last_release_version="1.0.0",
                tags=["1.0.0", "2.0.0"],
                release_series="1",
            ),
            Release(
                release_type="minor",
                current_version="1.1.0",
                expected_release_version="1.2.0",
                expected_last_release_version="1.1.0",
                tags=["1.0.0", "1.1.0", "2.0.0"],
                release_series="1.1",
            ),
            Release(
                release_type="patch",
                current_version="1.0.0",
                expected_release_version="1.0.1",
                expected_last_release_version="1.0.0",
                tags=["1.0.0", "1.1.0", "2.0.0"],
                release_series="1.0",
            ),
            Release(
                release_type="patch",
                current_version="1.0.5",
                expected_release_version="1.0.6",
                expected_last_release_version="1.0.5",
                tags=["1.0.5", "1.1.0", "2.0.0"],
                release_series="1.0",
            ),
            Release(
                release_type="patch",
                current_version="1.0.5",
                expected_release_version="1.1.1",
                expected_last_release_version="1.1.0",
                tags=["1.0.5", "1.1.0", "2.0.0"],
                release_series="1.1",
            ),
            Release(
                release_type="patch",
                current_version="1.1.0rc1",
                expected_release_version="1.1.0",
                expected_last_release_version="1.1.0b1",
                tags=["1.0.0", "1.1.0b1", "1.2.0", "2.0.0"],
                release_series="1.1",
            ),
            Release(
                release_type="alpha",
                current_version="1.0.0",
                expected_release_version="1.0.1a1",
                expected_last_release_version="1.0.0",
                tags=["1.0.0", "2.0.0", "1.1.0"],
                release_series="1.0",
            ),
            Release(
                release_type="alpha",
                current_version="1.0.0a3",
                expected_release_version="1.0.0a4",
                expected_last_release_version="1.0.0a3",
                tags=["1.0.0a3", "2.0.0", "1.1.0"],
                release_series="1.0",
            ),
            Release(
                release_type="beta",
                current_version="1.0.0",
                expected_release_version="1.0.1b1",
                expected_last_release_version="1.0.0",
                tags=["1.0.0", "2.0.0", "1.1.0"],
                release_series="1.0",
            ),
            Release(
                release_type="beta",
                current_version="1.0.0b2",
                expected_release_version="1.0.0b3",
                expected_last_release_version="1.0.0b2",
                tags=["1.0.0b2", "2.0.0", "1.1.0"],
                release_series="1.0",
            ),
            Release(
                release_type="release-candidate",
                current_version="1.0.0",
                expected_release_version="1.0.1rc1",
                expected_last_release_version="1.0.0",
                tags=["1.0.0", "2.0.0", "1.1.0"],
                release_series="1.0",
            ),
            Release(
                release_type="release-candidate",
                current_version="1.0.0rc1",
                expected_release_version="1.0.0rc2",
                expected_last_release_version="1.0.0rc1",
                tags=["1.0.0rc1", "2.0.0", "1.1.0"],
                release_series="1.0",
            ),
        ]

        for r in releases:
            with setup_go_project(
                current_version=r.current_version, tags=r.tags
            ):
                input_args = [
                    "release",
                    "--repository",
                    "foo/bar",
                    "--release-type",
                    r.release_type,
                ]
                if r.release_series:
                    input_args.extend(["--release-series", r.release_series])

                _, token, args = parse_args(input_args)
                released = create_release(
                    terminal=mock_terminal(),
                    error_terminal=mock_terminal(),
                    args=args,
                    token=token,  # type: ignore[arg-type]
                )

            self.assertEqual(
                released,
                CreateReleaseReturnValue.SUCCESS,
                f"Invalid return value for {r}",
            )
            self.assertEqual(
                create_api_mock.call_args.args[1],
                f"v{r.expected_release_version}",
                f"Unexpected current release version {r}",
            )

            self.assertEqual(
                create_changelog_mock.call_args.args[1],
                PEP440Version.from_string(r.expected_release_version),
                f"Unexpected current release version {r}",
            )

            if r.expected_last_release_version is None:
                self.assertIsNone(create_changelog_mock.call_args.args[2])
            else:
                self.assertEqual(
                    create_changelog_mock.call_args.args[2],
                    PEP440Version.from_string(r.expected_last_release_version),
                    f"Unexpected last release version for {r}",
                )

            create_api_mock.reset_mock()
            create_changelog_mock.reset_mock()
pontos-25.3.2/tests/release/test_helper.py000066400000000000000000000252431476255566300206300ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import contextlib
import os
import subprocess
import unittest
from pathlib import Path
from typing import Generator
from unittest.mock import MagicMock, patch

from pontos.git import ConfigScope, Git, GitError
from pontos.release.helper import (
    ReleaseType,
    find_signing_key,
    get_git_repository_name,
    get_next_release_version,
    repository_split,
)
from pontos.testing import temp_git_repository
from pontos.version import VersionError
from pontos.version.schemes import SemanticVersioningScheme


@contextlib.contextmanager
def init_test_git_repo() -> Generator[Path, None, None]:
    """Provide a temporary git repository with two preset remotes.

    Yields:
        Path of the temporary repository. The remote ``foo`` points at a
        repository named ``bla`` and ``origin`` points at ``testrepo``.
    """
    with temp_git_repository() as repository_path:
        repo_git = Git()
        repo_git.add_remote("foo", "https://foo.bar/bla.git")
        repo_git.add_remote("origin", "https://foo.bar/testrepo.git")
        yield repository_path


class GetGitRepositoryNameTestCase(unittest.TestCase):
    """Tests for get_git_repository_name against preset remotes."""

    def test_get_project_name(self):
        with init_test_git_repo():
            # An explicit remote resolves the name of that remote's repo.
            self.assertEqual(get_git_repository_name(remote="foo"), "bla")
            # Without a remote argument the "origin" remote is used.
            self.assertEqual(get_git_repository_name(), "testrepo")


class FindSigningKeyTestCase(unittest.TestCase):
    """Tests for find_signing_key reading git's user.signingkey setting."""

    def test_find_signing_key(self):
        """A locally configured signing key is found and returned."""
        # The terminal is only used for output; a mock suffices here.
        terminal = MagicMock()

        with temp_git_repository():
            git = Git()
            # Configure the key with repository-local scope only, so the
            # user's global git configuration stays untouched.
            git.config(
                "user.signingkey",
                "1234567890ABCEDEF1234567890ABCEDEF123456",
                scope=ConfigScope.LOCAL,
            )
            signing_key = find_signing_key(terminal)
            self.assertEqual(
                signing_key, "1234567890ABCEDEF1234567890ABCEDEF123456"
            )

    @unittest.skipUnless(os.environ.get("CI"), "only run on CI")
    def test_find_no_signing_key(self):
        """Without any configured signing key an empty string is returned.

        Only runs on CI because it temporarily modifies the *global* git
        configuration of the current user.
        """
        terminal = MagicMock()
        saved_key = None

        git = Git()
        try:
            # save possibly set git signing key from user temporarily
            try:
                saved_key = git.config(
                    "user.signingkey", scope=ConfigScope.GLOBAL
                )
            except GitError:
                # no global key was configured; nothing to restore later
                saved_key = None

            # Clear the key by setting it to an empty value; the test
            # tolerates git failing with exit code 5 here.
            # NOTE(review): this leaves an empty user.signingkey entry
            # behind when no key existed before, because the finally block
            # below only restores non-None keys -- confirm whether the
            # entry should be unset instead.
            try:
                git.config("user.signingkey", "", scope=ConfigScope.GLOBAL)
            except subprocess.CalledProcessError as e:
                self.assertEqual(e.returncode, 5)

            with temp_git_repository():
                signing_key = find_signing_key(terminal)
                self.assertEqual(signing_key, "")

        finally:
            # reset the previously saved signing key ...
            if saved_key is not None:
                git.config(
                    "user.signingkey", saved_key, scope=ConfigScope.GLOBAL
                )


class GetNextReleaseVersionTestCase(unittest.TestCase):
    """Tests for get_next_release_version covering every release type."""

    @staticmethod
    def _version(version_string: str):
        """Shorthand for parsing a semantic version string."""
        return SemanticVersioningScheme.parse_version(version_string)

    def _assert_next_version(self, release_type, expected: str) -> None:
        """Assert the version following 1.0.0 for the given release type."""
        result = get_next_release_version(
            calculator=SemanticVersioningScheme.calculator(),
            last_release_version=self._version("1.0.0"),
            release_type=release_type,
            release_version=None,
        )
        self.assertEqual(result, self._version(expected))

    def test_next_major_version(self):
        self._assert_next_version(ReleaseType.MAJOR, "2.0.0")

    def test_next_minor_version(self):
        self._assert_next_version(ReleaseType.MINOR, "1.1.0")

    def test_next_patch_version(self):
        self._assert_next_version(ReleaseType.PATCH, "1.0.1")

    def test_next_calendar_version(self):
        # The calendar scheme depends on today's date, so the calculator's
        # next_calendar_version is patched to a fixed value.
        calc = SemanticVersioningScheme.calculator()

        with patch.object(
            calc,
            "next_calendar_version",
            return_value=self._version("23.7.0"),
        ):
            result = get_next_release_version(
                calculator=calc,
                last_release_version=self._version("1.0.0"),
                release_type=ReleaseType.CALENDAR,
                release_version=None,
            )

        self.assertEqual(result, self._version("23.7.0"))

    def test_next_alpha_version(self):
        self._assert_next_version(ReleaseType.ALPHA, "1.0.1-alpha1")

    def test_next_beta_version(self):
        self._assert_next_version(ReleaseType.BETA, "1.0.1-beta1")

    def test_next_release_candidate_version(self):
        self._assert_next_version(ReleaseType.RELEASE_CANDIDATE, "1.0.1-rc1")

    def test_no_release_type(self):
        # Without a release type the explicit release version wins.
        wanted = self._version("1.2.3")
        result = get_next_release_version(
            calculator=SemanticVersioningScheme.calculator(),
            last_release_version=self._version("1.0.0"),
            release_type=None,
            release_version=wanted,
        )

        self.assertEqual(result, wanted)

    def test_release_type_version(self):
        # ReleaseType.VERSION passes the explicit version through.
        wanted = self._version("1.2.3")
        result = get_next_release_version(
            calculator=SemanticVersioningScheme.calculator(),
            last_release_version=self._version("1.0.0"),
            release_type=ReleaseType.VERSION,
            release_version=wanted,
        )

        self.assertEqual(result, wanted)

    def test_no_release_type_and_release_version(self):
        # Neither a release type nor a release version is an error.
        with self.assertRaisesRegex(
            VersionError,
            "No release version provided. Either use a different release "
            "type or provide a release version.",
        ):
            get_next_release_version(
                calculator=SemanticVersioningScheme.calculator(),
                last_release_version=self._version("1.0.0"),
                release_type=None,
                release_version=None,
            )

    def test_no_release_type_and_no_last_release_version(self):
        # An explicit release version works even without a last release.
        wanted = self._version("1.0.0")
        result = get_next_release_version(
            calculator=SemanticVersioningScheme.calculator(),
            last_release_version=None,
            release_type=None,
            release_version=wanted,
        )

        self.assertEqual(result, wanted)

    def test_release_type_version_and_no_last_release_version(self):
        wanted = self._version("1.0.0")
        result = get_next_release_version(
            calculator=SemanticVersioningScheme.calculator(),
            last_release_version=None,
            release_type=ReleaseType.VERSION,
            release_version=wanted,
        )

        self.assertEqual(result, wanted)

    def test_no_last_release_version(self):
        # A relative release type requires a last release to start from.
        with self.assertRaisesRegex(
            VersionError,
            "No last release version found for release type alpha. Either "
            "check the project setup or set a release version explicitly.",
        ):
            get_next_release_version(
                calculator=SemanticVersioningScheme.calculator(),
                last_release_version=None,
                release_type=ReleaseType.ALPHA,
                release_version=None,
            )

    def test_release_version_with_invalid_release_type(self):
        # An explicit version is only allowed with ReleaseType.VERSION.
        with self.assertRaisesRegex(
            VersionError,
            "Invalid release type alpha when setting release version "
            "explicitly. Use release type version instead.",
        ):
            get_next_release_version(
                calculator=SemanticVersioningScheme.calculator(),
                last_release_version=None,
                release_type=ReleaseType.ALPHA,
                release_version=self._version("1.0.0"),
            )


class RepositorySplitTestCase(unittest.TestCase):
    """Tests for splitting "owner/name" repository identifiers."""

    def test_invalid_repository(self):
        # Too many path components.
        with self.assertRaisesRegex(
            ValueError,
            r"Invalid repository foo/bar/baz. Format must be owner/name.",
        ):
            repository_split("foo/bar/baz")

        # No separator at all.
        with self.assertRaisesRegex(
            ValueError,
            r"Invalid repository foo_bar_baz. Format must be owner/name.",
        ):
            repository_split("foo_bar_baz")

    def test_repository(self):
        owner, name = repository_split("foo/bar")

        self.assertEqual(owner, "foo")
        self.assertEqual(name, "bar")
pontos-25.3.2/tests/release/test_parser.py000066400000000000000000000356231476255566300206500ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from contextlib import redirect_stderr
from io import StringIO
from pathlib import Path
from unittest.mock import patch

from pontos.release._parser import DEFAULT_SIGNING_KEY, parse_args
from pontos.release.create import create_release
from pontos.release.helper import ReleaseType
from pontos.release.show import OutputFormat, show
from pontos.release.sign import sign
from pontos.version.schemes._pep440 import PEP440Version, PEP440VersioningScheme


class ParseArgsTestCase(unittest.TestCase):
    """Tests for global flags and environment handling of parse_args."""

    def test_quiet(self):
        # Explicit -q switches quiet mode on.
        _, _, parsed = parse_args(["-q", "sign"])
        self.assertTrue(parsed.quiet)

        # Without the flag quiet mode stays off.
        _, _, parsed = parse_args(["sign"])
        self.assertFalse(parsed.quiet)

    @patch.dict("os.environ", {"GITHUB_USER": "foo"})
    def test_user(self):
        # The GitHub user is picked up from the environment.
        username, _, _ = parse_args(["sign"])
        self.assertEqual(username, "foo")

    @patch.dict("os.environ", {"GITHUB_TOKEN": "foo"})
    def test_token(self):
        # The GitHub token is picked up from the environment.
        _, auth_token, _ = parse_args(["sign"])
        self.assertEqual(auth_token, "foo")


class CreateParseArgsTestCase(unittest.TestCase):
    """Tests for parsing the "create" sub command and its "release" alias."""

    @staticmethod
    def _parse(*argv: str):
        """Parse the given CLI arguments and return the namespace only."""
        _, _, parsed = parse_args(list(argv))
        return parsed

    def _assert_parsing_fails(self, *argv: str) -> None:
        """Expect parsing the given CLI arguments to abort via SystemExit."""
        with self.assertRaises(SystemExit), redirect_stderr(StringIO()):
            parse_args(list(argv))

    def test_create_func(self):
        parsed = self._parse("create", "--release-type", "patch")
        self.assertEqual(parsed.func, create_release)

    def test_release_alias(self):
        parsed = self._parse("release", "--release-type", "patch")
        self.assertEqual(parsed.func, create_release)

    def test_default(self):
        parsed = self._parse("create", "--release-type", "patch")

        self.assertEqual(parsed.git_tag_prefix, "v")
        self.assertFalse(parsed.local)
        self.assertFalse(parsed.github_pre_release)

    def test_git_remote_name(self):
        parsed = self._parse(
            "create", "--git-remote-name", "foo", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_remote_name, "foo")

    def test_git_signing_key(self):
        parsed = self._parse(
            "create", "--git-signing-key", "123", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_signing_key, "123")

    def test_git_tag_prefix(self):
        parsed = self._parse(
            "create", "--git-tag-prefix", "a", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_tag_prefix, "a")

        parsed = self._parse(
            "create", "--git-tag-prefix", "", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_tag_prefix, "")

        # Passing the flag without a value also yields an empty prefix.
        parsed = self._parse(
            "create", "--git-tag-prefix", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_tag_prefix, "")

    def test_repository(self):
        parsed = self._parse(
            "create", "--repository", "foo/bar", "--release-type", "patch"
        )
        self.assertEqual(parsed.repository, "foo/bar")

    def test_invalid_repository(self):
        self._assert_parsing_fails(
            "create", "--repository", "foo_bar", "--release-type", "patch"
        )
        self._assert_parsing_fails(
            "sign",
            "--repository",
            "foo/bar/baz",
            "--release-type",
            "patch",
        )

    def test_next_version(self):
        parsed = self._parse(
            "create", "--next-version", "1.2.3", "--release-type", "patch"
        )
        self.assertEqual(parsed.next_version, PEP440Version("1.2.3"))

    def test_no_next_version(self):
        parsed = self._parse(
            "create", "--no-next-version", "--release-type", "patch"
        )
        self.assertFalse(parsed.next_version)

    def test_next_version_conflict(self):
        # --next-version and --no-next-version are mutually exclusive.
        self._assert_parsing_fails(
            "create",
            "--release-type",
            "patch",
            "--no-next-version",
            "--next-version",
            "1.2.3",
        )

    def test_last_release_version(self):
        parsed = self._parse(
            "create",
            "--release-type",
            "patch",
            "--last-release-version",
            "1.2.3",
        )

        self.assertEqual(parsed.func, create_release)
        self.assertEqual(parsed.last_release_version, PEP440Version("1.2.3"))

    def test_release_type(self):
        expected_types = [
            ("patch", ReleaseType.PATCH),
            ("calendar", ReleaseType.CALENDAR),
            ("minor", ReleaseType.MINOR),
            ("major", ReleaseType.MAJOR),
            ("alpha", ReleaseType.ALPHA),
            ("beta", ReleaseType.BETA),
            ("release-candidate", ReleaseType.RELEASE_CANDIDATE),
        ]
        for type_name, release_type in expected_types:
            parsed = self._parse("create", "--release-type", type_name)
            self.assertEqual(parsed.release_type, release_type)

        # Unknown release types are rejected.
        self._assert_parsing_fails("create", "--release-type", "foo")

    def test_release_type_version_without_release_version(self):
        # Release type "version" requires an explicit release version.
        self._assert_parsing_fails("create", "--release-type", "version")

        parsed = self._parse(
            "create",
            "--release-type",
            "version",
            "--release-version",
            "1.2.3",
        )
        self.assertEqual(parsed.release_type, ReleaseType.VERSION)
        self.assertEqual(parsed.release_version, PEP440Version("1.2.3"))

    def test_release_version(self):
        # --release-version implies release type "version".
        parsed = self._parse("create", "--release-version", "1.2.3")

        self.assertEqual(parsed.release_version, PEP440Version("1.2.3"))
        self.assertEqual(parsed.release_type, ReleaseType.VERSION)

        # An explicit version conflicts with any other release type.
        self._assert_parsing_fails(
            "create", "--release-version", "1.2.3", "--patch"
        )
        self._assert_parsing_fails(
            "create", "--release-version", "1.2.3", "--calendar"
        )
        self._assert_parsing_fails(
            "create",
            "--release-version",
            "1.2.3",
            "--release-type",
            "patch",
        )
        self._assert_parsing_fails(
            "create",
            "--release-version",
            "1.2.3",
            "--release-type",
            "calendar",
        )

    def test_local(self):
        parsed = self._parse("create", "--local", "--release-type", "patch")
        self.assertTrue(parsed.local)

    def test_conventional_commits_config(self):
        parsed = self._parse(
            "create",
            "--conventional-commits-config",
            "foo.toml",
            "--release-type",
            "patch",
        )
        self.assertEqual(parsed.cc_config, Path("foo.toml"))

    def test_changelog_conflict(self):
        # A changelog file and a conventional commits config are exclusive.
        self._assert_parsing_fails(
            "create",
            "--release-type",
            "patch",
            "--changelog",
            "foo.md",
            "--conventional-commits-config",
            "bar.toml",
        )

    def test_changelog(self):
        parsed = self._parse(
            "create",
            "--changelog",
            "foo.md",
            "--release-type",
            "patch",
        )
        self.assertEqual(parsed.changelog, Path("foo.md"))

    def test_release_series(self):
        parsed = self._parse(
            "create", "--release-type", "patch", "--release-series", "22.4"
        )
        self.assertEqual(parsed.release_series, "22.4")

    def test_update_project(self):
        # Updating the project files is the default behavior.
        parsed = self._parse("create", "--release-type", "patch")
        self.assertTrue(parsed.update_project)

        parsed = self._parse(
            "create", "--release-type", "patch", "--update-project"
        )
        self.assertTrue(parsed.update_project)

        parsed = self._parse(
            "create", "--release-type", "patch", "--no-update-project"
        )
        self.assertFalse(parsed.update_project)

    def test_github_pre_release(self):
        parsed = self._parse(
            "create", "--release-type", "patch", "--github-pre-release"
        )
        self.assertTrue(parsed.github_pre_release)


class SignParseArgsTestCase(unittest.TestCase):
    """Tests for parsing the "sign" sub command."""

    @staticmethod
    def _parse(*argv: str):
        """Parse the given CLI arguments and return the namespace only."""
        _, _, parsed = parse_args(list(argv))
        return parsed

    def test_sign_func(self):
        self.assertEqual(self._parse("sign").func, sign)

    def test_default(self):
        parsed = self._parse("sign")

        self.assertEqual(parsed.git_tag_prefix, "v")
        self.assertEqual(parsed.signing_key, DEFAULT_SIGNING_KEY)

    def test_repository(self):
        parsed = self._parse("sign", "--repository", "foo/bar")
        self.assertEqual(parsed.repository, "foo/bar")

    def test_invalid_repository(self):
        # Missing separator and too many components are both rejected.
        for repository in ("foo_bar", "foo/bar/baz"):
            with redirect_stderr(StringIO()), self.assertRaises(SystemExit):
                parse_args(["sign", "--repository", repository])

    def test_release_version(self):
        parsed = self._parse("sign", "--release-version", "1.2.3")
        self.assertEqual(parsed.release_version, PEP440Version("1.2.3"))

    def test_dry_run(self):
        self.assertTrue(self._parse("sign", "--dry-run").dry_run)

    def test_signing_key(self):
        parsed = self._parse("sign", "--signing-key", "123")
        self.assertEqual(parsed.signing_key, "123")

    def test_passphrase(self):
        parsed = self._parse("sign", "--passphrase", "123")
        self.assertEqual(parsed.passphrase, "123")

    def test_release_series(self):
        parsed = self._parse("sign", "--release-series", "22.4")
        self.assertEqual(parsed.release_series, "22.4")


class ShowParseArgsTestCase(unittest.TestCase):
    """Tests for parsing the "show" sub command."""

    @staticmethod
    def _parse(*argv: str):
        """Parse the given CLI arguments and return the namespace only."""
        _, _, parsed = parse_args(list(argv))
        return parsed

    def _assert_parsing_fails(self, *argv: str) -> None:
        """Expect parsing the given CLI arguments to abort via SystemExit."""
        with self.assertRaises(SystemExit), redirect_stderr(StringIO()):
            parse_args(list(argv))

    def test_show_func(self):
        parsed = self._parse("show", "--release-type", "patch")
        self.assertEqual(parsed.func, show)

    def test_defaults(self):
        parsed = self._parse("show", "--release-type", "patch")

        self.assertEqual(parsed.git_tag_prefix, "v")
        self.assertEqual(parsed.versioning_scheme, PEP440VersioningScheme)

    def test_release_series(self):
        parsed = self._parse(
            "show", "--release-type", "patch", "--release-series", "1.2"
        )
        self.assertEqual(parsed.release_series, "1.2")

    def test_release_type(self):
        expected_types = [
            ("patch", ReleaseType.PATCH),
            ("calendar", ReleaseType.CALENDAR),
            ("minor", ReleaseType.MINOR),
            ("major", ReleaseType.MAJOR),
            ("alpha", ReleaseType.ALPHA),
            ("beta", ReleaseType.BETA),
            ("release-candidate", ReleaseType.RELEASE_CANDIDATE),
        ]
        for type_name, release_type in expected_types:
            parsed = self._parse("show", "--release-type", type_name)
            self.assertEqual(parsed.release_type, release_type)

        # Unknown release types are rejected.
        self._assert_parsing_fails("show", "--release-type", "foo")

    def test_git_tag_prefix(self):
        parsed = self._parse(
            "show", "--git-tag-prefix", "a", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_tag_prefix, "a")

        parsed = self._parse(
            "show", "--git-tag-prefix", "", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_tag_prefix, "")

        # Passing the flag without a value also yields an empty prefix.
        parsed = self._parse(
            "show", "--git-tag-prefix", "--release-type", "patch"
        )
        self.assertEqual(parsed.git_tag_prefix, "")

    def test_release_version(self):
        # --release-version implies release type "version".
        parsed = self._parse("show", "--release-version", "1.2.3")

        self.assertEqual(parsed.release_version, PEP440Version("1.2.3"))
        self.assertEqual(parsed.release_type, ReleaseType.VERSION)

        # An explicit version conflicts with any other release type.
        self._assert_parsing_fails(
            "show",
            "--release-version",
            "1.2.3",
            "--release-type",
            "patch",
        )
        self._assert_parsing_fails(
            "show",
            "--release-version",
            "1.2.3",
            "--release-type",
            "calendar",
        )

    def test_output_format(self):
        parsed = self._parse(
            "show", "--release-type", "patch", "--output-format", "env"
        )
        self.assertEqual(parsed.output_format, OutputFormat.ENV)

        parsed = self._parse(
            "show", "--release-type", "patch", "--output-format", "json"
        )
        self.assertEqual(parsed.output_format, OutputFormat.JSON)

        parsed = self._parse(
            "show",
            "--release-type",
            "patch",
            "--output-format",
            "github-action",
        )

        with patch.dict(
            "os.environ", {"GITHUB_OUTPUT": "/tmp/output"}, clear=True
        ):
            self.assertEqual(parsed.output_format, OutputFormat.GITHUB_ACTION)
pontos-25.3.2/tests/release/test_show.py000066400000000000000000000274361476255566300203370ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later


import unittest
from pathlib import Path
from unittest.mock import MagicMock, call, patch

from pontos.git import Git
from pontos.release.helper import ReleaseType
from pontos.release.show import (
    OutputFormat,
    ShowReleaseCommand,
    ShowReleaseReturnValue,
)
from pontos.testing import temp_file, temp_git_repository
from pontos.version.schemes import PEP440VersioningScheme


def setup_git_repo(temp_git: Path) -> None:
    """Seed the repository at *temp_git* with one commit tagged v1.0.0."""
    tracked_file = temp_git / "some-file.txt"
    tracked_file.touch()

    repo = Git()
    repo.add(tracked_file)
    repo.commit("Add some file", gpg_sign=False, verify=False)
    repo.tag("v1.0.0", sign=False)


class ShowTestCase(unittest.TestCase):
    def test_env_output(self):
        """ENV output prints one KEY=VALUE line per version component."""
        terminal = MagicMock()
        with temp_git_repository() as temp_git:
            setup_git_repo(temp_git)

            show_cmd = ShowReleaseCommand(
                terminal=terminal, error_terminal=MagicMock()
            )

            # One case per release type: the lines the run should print.
            # The terminal mock is deliberately not reset between runs, so
            # assert_has_calls checks the calls of each individual run.
            cases = [
                (
                    ReleaseType.PATCH,
                    [
                        "LAST_RELEASE_VERSION=1.0.0",
                        "LAST_RELEASE_VERSION_MAJOR=1",
                        "LAST_RELEASE_VERSION_MINOR=0",
                        "LAST_RELEASE_VERSION_PATCH=0",
                        "RELEASE_VERSION=1.0.1",
                        "RELEASE_VERSION_MAJOR=1",
                        "RELEASE_VERSION_MINOR=0",
                        "RELEASE_VERSION_PATCH=1",
                    ],
                ),
                (
                    ReleaseType.MINOR,
                    [
                        "LAST_RELEASE_VERSION=1.0.0",
                        "LAST_RELEASE_VERSION_MAJOR=1",
                        "LAST_RELEASE_VERSION_MINOR=0",
                        "LAST_RELEASE_VERSION_PATCH=0",
                        "RELEASE_VERSION=1.1.0",
                        "RELEASE_VERSION_MAJOR=1",
                        "RELEASE_VERSION_MINOR=1",
                        "RELEASE_VERSION_PATCH=0",
                    ],
                ),
                (
                    ReleaseType.MAJOR,
                    [
                        "LAST_RELEASE_VERSION=1.0.0",
                        "LAST_RELEASE_VERSION_MAJOR=1",
                        "LAST_RELEASE_VERSION_MINOR=0",
                        "LAST_RELEASE_VERSION_PATCH=0",
                        "RELEASE_VERSION=2.0.0",
                        "RELEASE_VERSION_MAJOR=2",
                        "RELEASE_VERSION_MINOR=0",
                        "RELEASE_VERSION_PATCH=0",
                    ],
                ),
            ]

            for release_type, expected_lines in cases:
                return_val = show_cmd.run(
                    versioning_scheme=PEP440VersioningScheme,
                    release_type=release_type,
                    release_version=None,
                    git_tag_prefix="v",
                    output_format=OutputFormat.ENV,
                )

                self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

                terminal.print.assert_has_calls(
                    [call(line) for line in expected_lines]
                )

    def test_json_output(self):
        terminal = MagicMock()
        with temp_git_repository() as temp_git:
            setup_git_repo(temp_git)

            show_cmd = ShowReleaseCommand(
                terminal=terminal, error_terminal=MagicMock()
            )

            return_val = show_cmd.run(
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.PATCH,
                release_version=None,
                git_tag_prefix="v",
                output_format=OutputFormat.JSON,
            )

            expected = """{
  "release_version": "1.0.1",
  "release_version_major": 1,
  "release_version_minor": 0,
  "release_version_patch": 1,
  "last_release_version": "1.0.0",
  "last_release_version_major": 1,
  "last_release_version_minor": 0,
  "last_release_version_patch": 0
}"""

            self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

            terminal.print.assert_called_once_with(expected)
            terminal.reset_mock()

            return_val = show_cmd.run(
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MINOR,
                release_version=None,
                git_tag_prefix="v",
                output_format=OutputFormat.JSON,
            )

            expected = """{
  "release_version": "1.1.0",
  "release_version_major": 1,
  "release_version_minor": 1,
  "release_version_patch": 0,
  "last_release_version": "1.0.0",
  "last_release_version_major": 1,
  "last_release_version_minor": 0,
  "last_release_version_patch": 0
}"""

            self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

            terminal.print.assert_called_once_with(expected)
            terminal.reset_mock()

            return_val = show_cmd.run(
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.MAJOR,
                release_version=None,
                git_tag_prefix="v",
                output_format=OutputFormat.JSON,
            )

            expected = """{
  "release_version": "2.0.0",
  "release_version_major": 2,
  "release_version_minor": 0,
  "release_version_patch": 0,
  "last_release_version": "1.0.0",
  "last_release_version_major": 1,
  "last_release_version_minor": 0,
  "last_release_version_patch": 0
}"""

            self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

            terminal.print.assert_called_once_with(expected)

    def test_github_action_output(self):
        terminal = MagicMock()
        with temp_git_repository() as temp_git:
            setup_git_repo(temp_git)

            show_cmd = ShowReleaseCommand(
                terminal=terminal, error_terminal=MagicMock()
            )

            output_file = temp_git.absolute() / "output"

            with patch.dict(
                "os.environ", {"GITHUB_OUTPUT": f"{output_file}"}, clear=True
            ):
                return_val = show_cmd.run(
                    versioning_scheme=PEP440VersioningScheme,
                    release_type=ReleaseType.PATCH,
                    release_version=None,
                    git_tag_prefix="v",
                    output_format=OutputFormat.GITHUB_ACTION,
                )

                self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

                expected = """last_release_version=1.0.0
last_release_version_major=1
last_release_version_minor=0
last_release_version_patch=0
release_version_major=1
release_version_minor=0
release_version_patch=1
release_version=1.0.1
"""
                actual = output_file.read_text(encoding="utf8")

                self.assertEqual(expected, actual)

                output_file.unlink()

                return_val = show_cmd.run(
                    versioning_scheme=PEP440VersioningScheme,
                    release_type=ReleaseType.MINOR,
                    release_version=None,
                    git_tag_prefix="v",
                    output_format=OutputFormat.GITHUB_ACTION,
                )

                self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

                expected = """last_release_version=1.0.0
last_release_version_major=1
last_release_version_minor=0
last_release_version_patch=0
release_version_major=1
release_version_minor=1
release_version_patch=0
release_version=1.1.0
"""
                actual = output_file.read_text(encoding="utf8")

                self.assertEqual(expected, actual)

                output_file.unlink()

                return_val = show_cmd.run(
                    versioning_scheme=PEP440VersioningScheme,
                    release_type=ReleaseType.MAJOR,
                    release_version=None,
                    git_tag_prefix="v",
                    output_format=OutputFormat.GITHUB_ACTION,
                )

                self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

                expected = """last_release_version=1.0.0
last_release_version_major=1
last_release_version_minor=0
last_release_version_patch=0
release_version_major=2
release_version_minor=0
release_version_patch=0
release_version=2.0.0
"""
                actual = output_file.read_text(encoding="utf8")

                self.assertEqual(expected, actual)

                output_file.unlink()

    def test_initial_release(self):
        """Show a first release in a repo without any prior release tag.

        Runs the same explicit 1.0.0 release through all three output
        formats (ENV, JSON, GitHub Action). In every format all
        ``last_release_*`` fields must be present but empty, since no
        previous release exists.
        """
        terminal = MagicMock()
        with temp_git_repository():
            show_cmd = ShowReleaseCommand(
                terminal=terminal, error_terminal=MagicMock()
            )

            # ENV format: one KEY=VALUE line per print() call
            return_val = show_cmd.run(
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=PEP440VersioningScheme.parse_version("1.0.0"),
                git_tag_prefix="v",
                output_format=OutputFormat.ENV,
            )

            self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

            terminal.print.assert_has_calls(
                [
                    call("LAST_RELEASE_VERSION="),
                    call("LAST_RELEASE_VERSION_MAJOR="),
                    call("LAST_RELEASE_VERSION_MINOR="),
                    call("LAST_RELEASE_VERSION_PATCH="),
                    call("RELEASE_VERSION=1.0.0"),
                    call("RELEASE_VERSION_MAJOR=1"),
                    call("RELEASE_VERSION_MINOR=0"),
                    call("RELEASE_VERSION_PATCH=0"),
                ]
            )

            terminal.reset_mock()

            # JSON format: a single print() call with the whole document
            return_val = show_cmd.run(
                versioning_scheme=PEP440VersioningScheme,
                release_type=ReleaseType.VERSION,
                release_version=PEP440VersioningScheme.parse_version("1.0.0"),
                git_tag_prefix="v",
                output_format=OutputFormat.JSON,
            )

            expected = """{
  "release_version": "1.0.0",
  "release_version_major": 1,
  "release_version_minor": 0,
  "release_version_patch": 0,
  "last_release_version": "",
  "last_release_version_major": "",
  "last_release_version_minor": "",
  "last_release_version_patch": ""
}"""

            self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

            terminal.print.assert_called_once_with(expected)

            # GitHub Action format: key=value lines appended to the file
            # referenced by the GITHUB_OUTPUT environment variable
            with (
                temp_file(name="output") as output_file,
                patch.dict(
                    "os.environ",
                    {"GITHUB_OUTPUT": f"{output_file}"},
                    clear=True,
                ),
            ):
                return_val = show_cmd.run(
                    versioning_scheme=PEP440VersioningScheme,
                    release_type=ReleaseType.VERSION,
                    release_version=PEP440VersioningScheme.parse_version(
                        "1.0.0"
                    ),
                    git_tag_prefix="v",
                    output_format=OutputFormat.GITHUB_ACTION,
                )

                self.assertEqual(return_val, ShowReleaseReturnValue.SUCCESS)

                expected = """last_release_version=
last_release_version_major=
last_release_version_minor=
last_release_version_patch=
release_version_major=1
release_version_minor=0
release_version_patch=0
release_version=1.0.0
"""
                actual = output_file.read_text(encoding="utf8")

                self.assertEqual(expected, actual)
pontos-25.3.2/tests/release/test_sign.py000066400000000000000000000617261476255566300203170ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# pylint: disable=C0413,W0108

import unittest
from asyncio.subprocess import Process
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, call, patch

import httpx

from pontos.release.main import parse_args
from pontos.release.sign import SignReturnValue, sign
from pontos.terminal.rich import RichTerminal
from pontos.testing import AsyncIteratorMock, temp_directory


def mock_terminal() -> MagicMock:
    """Create a terminal double restricted to the RichTerminal interface."""
    terminal = MagicMock(spec=RichTerminal)
    return terminal


@patch.dict("os.environ", {"GITHUB_TOKEN": "foo"})
class SignTestCase(unittest.TestCase):
    """Tests for the release ``sign`` command.

    ``GITHUB_TOKEN`` is patched for the whole class so that ``parse_args``
    finds a token unless a test explicitly clears it again.

    The six signing scenarios previously duplicated the full mock wiring,
    the seven-line gpg ``call`` tuples (four per test) and the expected
    signature upload list; that boilerplate now lives in private helpers
    and class-level fixture constants.
    """

    # Artifacts "downloaded" by the mocked SignCommand download helpers.
    TAR_FILE = Path("file.tar")
    ZIP_FILE = Path("file.zip")
    ASSET_FILES = (Path("file1"), Path("file2"))

    # Detached signatures expected to be uploaded after successful signing,
    # in download order: zip, tar, then the extra release assets.
    SIGNATURE_UPLOADS = [
        (Path("file.zip.asc"), "application/pgp-signature"),
        (Path("file.tar.asc"), "application/pgp-signature"),
        (Path("file1.asc"), "application/pgp-signature"),
        (Path("file2.asc"), "application/pgp-signature"),
    ]

    @staticmethod
    def _run_sign(*arguments: str):
        """Parse ``arguments`` and run the sign command with mock terminals."""
        _, token, args = parse_args(list(arguments))
        return sign(
            terminal=mock_terminal(),
            error_terminal=mock_terminal(),
            args=args,
            token=token,
        )

    @staticmethod
    def _gpg_call(file: Path):
        """Expected ``cmd_runner`` call for detach-signing a single file."""
        return call(
            "gpg",
            "--default-key",
            "0ED1E580",
            "--yes",
            "--detach-sign",
            "--armor",
            file,
        )

    def _expected_sign_calls(self, *files: Path) -> list:
        """Expected gpg invocations for signing ``files`` in order."""
        return [self._gpg_call(file) for file in files]

    def _setup_sign_mocks(
        self,
        github_releases_mock: AsyncMock,
        download_zip_mock: AsyncMock,
        download_tar_mock: AsyncMock,
        download_asset_mock: AsyncMock,
        cmd_runner_mock: AsyncMock,
        *,
        returncode: int = 0,
        stderr="",
    ) -> None:
        """Wire up the common download, release and gpg process mocks.

        Args:
            returncode: Exit code reported by the mocked gpg process.
            stderr: stderr payload returned by the mocked gpg process.
        """
        download_tar_mock.return_value = self.TAR_FILE
        download_zip_mock.return_value = self.ZIP_FILE
        download_asset_mock.side_effect = list(self.ASSET_FILES)
        github_releases_mock.exists = AsyncMock(return_value=True)
        github_releases_mock.download_release_assets.return_value = (
            AsyncIteratorMock(
                [
                    ("foo", MagicMock()),
                    ("bar", MagicMock()),
                ]
            )
        )
        process = AsyncMock(spec=Process, returncode=returncode)
        process.communicate.return_value = ("", stderr)
        cmd_runner_mock.return_value = process

    @patch.dict("os.environ", {"GITHUB_TOKEN": ""})
    def test_no_token(self):
        """Signing without a GitHub token must fail early."""
        result = self._run_sign(
            "sign",
            "--repository",
            "greenbone/foo",
            "--release-version",
            "0.0.1",
        )

        self.assertEqual(result, SignReturnValue.TOKEN_MISSING)

    def test_no_release_error(self):
        """Without a version and without git tags no release can be found."""
        with temp_directory(change_into=True):
            result = self._run_sign("sign", "--repository", "greenbone/foo")

            self.assertEqual(result, SignReturnValue.NO_RELEASE_VERSION)

    def test_invalid_repository(self):
        """A repository without the "owner/name" form must be rejected."""
        with temp_directory(change_into=True):
            _, token, args = parse_args(["sign", "--repository", "foo/bar"])
            # break the repository value after parsing: no "/" separator
            setattr(args, "repository", "foo_bar")

            result = sign(
                terminal=mock_terminal(),
                error_terminal=mock_terminal(),
                args=args,
                token=token,
            )

            self.assertEqual(result, SignReturnValue.INVALID_REPOSITORY)

    @patch("pontos.release.sign.get_last_release_version", autospec=True)
    def test_no_release_version(self, get_last_release_version_mock: MagicMock):
        """No version argument and no determinable last release must fail."""
        get_last_release_version_mock.return_value = None

        with temp_directory(change_into=True):
            result = self._run_sign("sign", "--repository", "foo/bar")

            self.assertEqual(result, SignReturnValue.NO_RELEASE_VERSION)

    @patch("pontos.release.sign.GitHubAsyncRESTApi.releases", autospec=True)
    def test_release_does_not_exist(self, github_mock: AsyncMock):
        """Signing must fail when the GitHub release does not exist."""
        github_mock.exists = AsyncMock(return_value=False)

        with temp_directory(change_into=True):
            result = self._run_sign(
                "sign",
                "--repository",
                "foo/bar",
                "--release-version",
                "1.2.3",
            )

            self.assertEqual(result, SignReturnValue.NO_RELEASE)

    @patch("pontos.release.sign.cmd_runner", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_asset", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_tar", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_zip", autospec=True)
    @patch("pontos.release.sign.GitHubAsyncRESTApi.releases", autospec=True)
    def test_sign_success(
        self,
        github_releases_mock: AsyncMock,
        download_zip_mock: AsyncMock,
        download_tar_mock: AsyncMock,
        download_asset_mock: AsyncMock,
        cmd_runner_mock: AsyncMock,
    ):
        """All downloaded artifacts are signed and the signatures uploaded."""
        self._setup_sign_mocks(
            github_releases_mock,
            download_zip_mock,
            download_tar_mock,
            download_asset_mock,
            cmd_runner_mock,
        )

        with temp_directory(change_into=True):
            result = self._run_sign(
                "sign",
                "--repository",
                "greenbone/foo",
                "--release-version",
                "1.2.3",
            )

            self.assertEqual(result, SignReturnValue.SUCCESS)

            cmd_runner_mock.assert_has_calls(
                self._expected_sign_calls(
                    self.ZIP_FILE, self.TAR_FILE, *self.ASSET_FILES
                )
            )

            github_releases_mock.upload_release_assets.assert_called_once_with(
                "greenbone/foo", "v1.2.3", self.SIGNATURE_UPLOADS
            )

    @patch("pontos.version.helper.Git", autospec=True)
    @patch("pontos.release.sign.cmd_runner", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_asset", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_tar", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_zip", autospec=True)
    @patch("pontos.release.sign.GitHubAsyncRESTApi.releases", autospec=True)
    def test_sign_success_determine_release_version(
        self,
        github_releases_mock: AsyncMock,
        download_zip_mock: AsyncMock,
        download_tar_mock: AsyncMock,
        download_asset_mock: AsyncMock,
        cmd_runner_mock: AsyncMock,
        git_mock: MagicMock,
    ):
        """Without a version argument the latest git tag must be used."""
        self._setup_sign_mocks(
            github_releases_mock,
            download_zip_mock,
            download_tar_mock,
            download_asset_mock,
            cmd_runner_mock,
        )
        git_mock.return_value.list_tags.return_value = ["v1.0.0", "v1.2.3"]

        with temp_directory(change_into=True):
            result = self._run_sign("sign", "--repository", "greenbone/foo")

            self.assertEqual(result, SignReturnValue.SUCCESS)

            cmd_runner_mock.assert_has_calls(
                self._expected_sign_calls(
                    self.ZIP_FILE, self.TAR_FILE, *self.ASSET_FILES
                )
            )

            github_releases_mock.upload_release_assets.assert_called_once_with(
                "greenbone/foo", "v1.2.3", self.SIGNATURE_UPLOADS
            )

    @patch("pontos.version.helper.Git", autospec=True)
    @patch("pontos.release.sign.cmd_runner", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_asset", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_tar", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_zip", autospec=True)
    @patch("pontos.release.sign.GitHubAsyncRESTApi.releases", autospec=True)
    def test_sign_success_determine_release_version_with_release_series(
        self,
        github_releases_mock: AsyncMock,
        download_zip_mock: AsyncMock,
        download_tar_mock: AsyncMock,
        download_asset_mock: AsyncMock,
        cmd_runner_mock: AsyncMock,
        git_mock: MagicMock,
    ):
        """The version must be determined within the given release series."""
        self._setup_sign_mocks(
            github_releases_mock,
            download_zip_mock,
            download_tar_mock,
            download_asset_mock,
            cmd_runner_mock,
        )
        git_mock.return_value.list_tags.return_value = [
            "v2.0.1",
        ]

        with temp_directory(change_into=True):
            result = self._run_sign(
                "sign",
                "--repository",
                "greenbone/foo",
                "--release-series",
                "2",
            )

            self.assertEqual(result, SignReturnValue.SUCCESS)

            cmd_runner_mock.assert_has_calls(
                self._expected_sign_calls(
                    self.ZIP_FILE, self.TAR_FILE, *self.ASSET_FILES
                )
            )

            github_releases_mock.upload_release_assets.assert_called_once_with(
                "greenbone/foo", "v2.0.1", self.SIGNATURE_UPLOADS
            )

    @patch("pontos.release.sign.cmd_runner", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_asset", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_tar", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_zip", autospec=True)
    @patch("pontos.release.sign.GitHubAsyncRESTApi.releases", autospec=True)
    def test_sign_success_dry_run(
        self,
        github_releases_mock: AsyncMock,
        download_zip_mock: AsyncMock,
        download_tar_mock: AsyncMock,
        download_asset_mock: AsyncMock,
        cmd_runner_mock: MagicMock,
    ):
        """A dry run signs the files but uploads nothing."""
        self._setup_sign_mocks(
            github_releases_mock,
            download_zip_mock,
            download_tar_mock,
            download_asset_mock,
            cmd_runner_mock,
        )

        with temp_directory(change_into=True):
            result = self._run_sign(
                "sign",
                "--repository",
                "greenbone/foo",
                "--release-version",
                "1.2.3",
                "--dry-run",
            )

            self.assertEqual(result, SignReturnValue.SUCCESS)

            cmd_runner_mock.assert_has_calls(
                self._expected_sign_calls(
                    self.ZIP_FILE, self.TAR_FILE, *self.ASSET_FILES
                )
            )

            github_releases_mock.upload_release_assets.assert_not_called()

    @patch("pontos.release.sign.cmd_runner", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_asset", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_tar", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_zip", autospec=True)
    @patch("pontos.release.sign.GitHubAsyncRESTApi.releases", autospec=True)
    def test_sign_signature_failure(
        self,
        github_releases_mock: AsyncMock,
        download_zip_mock: AsyncMock,
        download_tar_mock: AsyncMock,
        download_asset_mock: AsyncMock,
        cmd_runner_mock: MagicMock,
    ):
        """A failing gpg process aborts signing and uploads nothing."""
        self._setup_sign_mocks(
            github_releases_mock,
            download_zip_mock,
            download_tar_mock,
            download_asset_mock,
            cmd_runner_mock,
            returncode=2,
            stderr=b"An Error",
        )

        with temp_directory(change_into=True):
            result = self._run_sign(
                "sign",
                "--repository",
                "greenbone/foo",
                "--release-version",
                "1.2.3",
            )

            self.assertEqual(
                result, SignReturnValue.SIGNATURE_GENERATION_FAILED
            )

            # only the first (zip) file is attempted before aborting
            cmd_runner_mock.assert_has_calls(
                self._expected_sign_calls(self.ZIP_FILE)
            )

            github_releases_mock.upload_release_assets.assert_not_called()

    @patch("pontos.release.sign.cmd_runner", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_asset", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_tar", autospec=True)
    @patch("pontos.release.sign.SignCommand.download_zip", autospec=True)
    @patch("pontos.release.sign.GitHubAsyncRESTApi.releases", autospec=True)
    def test_sign_upload_failure(
        self,
        github_releases_mock: AsyncMock,
        download_zip_mock: AsyncMock,
        download_tar_mock: AsyncMock,
        download_asset_mock: AsyncMock,
        cmd_runner_mock: MagicMock,
    ):
        """An HTTP error while uploading signatures is reported."""
        self._setup_sign_mocks(
            github_releases_mock,
            download_zip_mock,
            download_tar_mock,
            download_asset_mock,
            cmd_runner_mock,
        )
        github_releases_mock.upload_release_assets.side_effect = (
            httpx.HTTPStatusError(
                "An error",
                request=MagicMock(spec=httpx.Request),
                response=MagicMock(spec=httpx.Response),
            )
        )

        with temp_directory(change_into=True):
            result = self._run_sign(
                "sign",
                "--repository",
                "greenbone/foo",
                "--release-version",
                "1.2.3",
            )

            self.assertEqual(result, SignReturnValue.UPLOAD_ASSET_ERROR)

            cmd_runner_mock.assert_has_calls(
                self._expected_sign_calls(
                    self.ZIP_FILE, self.TAR_FILE, *self.ASSET_FILES
                )
            )

            github_releases_mock.upload_release_assets.assert_called_once_with(
                "greenbone/foo", "v1.2.3", self.SIGNATURE_UPLOADS
            )
pontos-25.3.2/tests/release/v1.2.3.md000066400000000000000000000005311476255566300171020ustar00rootroot00000000000000# Changelog

All notable changes to this project will be documented in this file.

## [21.8.1] - 2021-08-23

## Added

* Need for commits. [1234567](https://github.com/foo/bar/commit/1234567)

## Changed

* fooooo. [1234568](https://github.com/foo/bar/commit/1234568)

[21.8.1]: https://github.com/y0urself/test_workflows/compare/21.8.0...21.8.1pontos-25.3.2/tests/terminal/000077500000000000000000000000001476255566300161255ustar00rootroot00000000000000pontos-25.3.2/tests/terminal/__init__.py000066400000000000000000000001411476255566300202320ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/terminal/test_terminal.py000066400000000000000000000136151476255566300213570ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2019-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=invalid-name, protected-access

import unittest
from io import StringIO
from unittest.mock import MagicMock, patch

import colorful as cf

from pontos.terminal.terminal import ConsoleTerminal as Terminal
from pontos.terminal.terminal import Signs


class TerminalTestCase(unittest.TestCase):
    """Tests for ConsoleTerminal's status-line rendering.

    Each test captures stdout via a StringIO patch and compares both the
    styled message and its exact length against what the ``colorful``
    module produces, so colour codes and status prefixes are verified
    byte for byte.
    """

    def setUp(self):
        self.maxDiff = 180
        # getting the bash-color-codes from the colorful module
        self.red = cf.red
        self.green = cf.green
        self.yellow = cf.yellow
        self.cyan = cf.cyan
        self.white = cf.white
        self.reset = cf.reset
        self.bold = cf.bold
        # every colors second value is the reset value ...
        self.term = Terminal()
        # fix the reported terminal width so line wrapping is deterministic
        self.term.get_width = MagicMock(return_value=80)

    @patch("sys.stdout", new_callable=StringIO)
    def test_error(self, mock_stdout):
        # error() prefixes the message with a red ERROR sign
        status = f"{self.red(Signs.ERROR)} "
        msg = "foo bar"

        expected_msg = self.reset(f"{status}{msg}").styled_string + "\n"
        expected_len = len(expected_msg)

        self.term.error(msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)

    @patch("sys.stdout", new_callable=StringIO)
    def test_fail(self, mock_stdout):
        # fail() prefixes the message with a red FAIL sign
        status = f"{self.red(Signs.FAIL)} "
        msg = "foo bar baz"

        expected_msg = self.reset(f"{status}{msg}").styled_string + "\n"
        expected_len = len(expected_msg)

        self.term.fail(msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)

    @patch("sys.stdout", new_callable=StringIO)
    def test_info(self, mock_stdout):
        # info() prefixes the message with a cyan INFO sign
        status = f"{self.cyan(Signs.INFO)} "
        msg = "foo bar"

        expected_msg = self.reset(f"{status}{msg}").styled_string + "\n"
        expected_len = len(expected_msg)

        self.term.info(msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)

    @patch("sys.stdout", new_callable=StringIO)
    def test_info_with_newline(self, mock_stdout):
        status = f"{self.cyan(Signs.INFO)} "
        msg = "foo bar\nbaz"
        # continuation lines are indented by two spaces to align with the sign
        repl_msg = msg.replace("\n", "\n  ")

        expected_msg = self.reset(f"{status}{repl_msg}").styled_string + "\n"
        expected_len = len(expected_msg)

        self.term.info(msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)

    @patch("sys.stdout", new_callable=StringIO)
    def test_bold_info(self, mock_stdout):
        # bold_info() renders the whole line in bold instead of reset style
        status = f"{self.cyan(Signs.INFO)} "
        msg = "bold foo bar"

        expected_msg = self.bold(f"{status}{msg}").styled_string + "\n"
        expected_len = len(expected_msg)

        self.term.bold_info(msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)

    @patch("sys.stdout", new_callable=StringIO)
    def test_ok(self, mock_stdout):
        # ok() prefixes the message with a green OK sign
        status = f"{self.green(Signs.OK)} "
        msg = "foo bar"

        expected_msg = self.reset(f"{status}{msg}").styled_string + "\n"
        expected_len = len(expected_msg)

        self.term.ok(msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)

    @patch("sys.stdout", new_callable=StringIO)
    def test_warning(self, mock_stdout):
        msg = "foo bar"

        # warning() prefixes the message with a yellow WARNING sign
        status = f"{self.yellow(Signs.WARNING)} "

        expected_msg = self.reset(f"{status}{msg}").styled_string + "\n"
        expected_len = len(expected_msg)

        self.term.warning(msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)

    @patch("sys.stdout", new_callable=StringIO)
    def test_print(self, mock_stdout):
        # plain print() uses a white blank sign as the status prefix
        expected_msg = (
            self.reset(f'{self.white(" ")} foo bar').styled_string + "\n"
        )

        self.term.print("foo bar")

        ret = mock_stdout.getvalue()

        self.assertEqual(len(ret), len(expected_msg))
        self.assertEqual(ret, expected_msg)

    @patch("sys.stdout", new_callable=StringIO)
    def test_with_indent(self, mock_stdout):
        """Indentation applies inside the context manager and resets after."""
        expected_msg = (
            self.reset(f'{self.white(" ")}   foo').styled_string + "\n"
        )

        with self.term.indent(2):
            self.term.print("foo")

            ret = mock_stdout.getvalue()

        self.assertEqual(len(ret), len(expected_msg))
        self.assertEqual(ret, expected_msg)

        # clear the buffer
        mock_stdout.truncate(0)
        mock_stdout.seek(0)

        # outside the context manager the indentation is gone again
        expected_msg = self.reset(f'{self.white(" ")} bar').styled_string + "\n"
        self.term.print("bar")

        ret = mock_stdout.getvalue()

        self.assertEqual(len(ret), len(expected_msg))
        self.assertEqual(ret, expected_msg)

    @patch("sys.stdout", new_callable=StringIO)
    def test_long_msg(self, mock_stdout):
        """Messages longer than the mocked 80-column width wrap and indent."""
        long_msg = (
            "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, "
            "sed diam nonumy eirmod tempor invidunt ut labore et dolore magna"
            " aliquyam erat, sed diam voluptua."
        )
        expected_msg = (
            self.reset(
                f'{self.white(" ")} Lorem ipsum dolor sit amet, consetetur '
                "sadipscing elitr, sed diam nonumy eirmo\n  d tempor invidunt "
                "ut labore et dolore magna aliquyam erat, sed diam voluptua."
            ).styled_string
            + "\n"
        )
        expected_len = len(expected_msg)

        self.term.print(long_msg)

        ret = mock_stdout.getvalue()

        self.assertEqual(ret, expected_msg)
        self.assertEqual(len(ret), expected_len)


if __name__ == "__main__":
    # Allow running this test module directly: python test_terminal.py
    unittest.main()
pontos-25.3.2/tests/test-typing/000077500000000000000000000000001476255566300166015ustar00rootroot00000000000000pontos-25.3.2/tests/test-typing/__init__.py000066400000000000000000000001221476255566300207050ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/test-typing/test_typing.py000066400000000000000000000007461476255566300215330ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest

from pontos.typing import SupportsStr


class SupportsStrTestCase(unittest.TestCase):
    """Ensure SupportsStr structurally matches objects declaring __str__."""

    def test_str(self):
        # Builtins implementing __str__ satisfy the runtime protocol.
        for value in ("", None):
            self.assertIsInstance(value, SupportsStr)

    def test_some_class(self):
        # A user-defined class qualifies solely by declaring __str__.
        class Foo:
            def __str__(self) -> str:
                pass

        self.assertIsInstance(Foo(), SupportsStr)
pontos-25.3.2/tests/test_enum.py000066400000000000000000000016341476255566300166730ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2024 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later


import unittest
from argparse import ArgumentTypeError

from pontos.enum import StrEnum, enum_type


class EnumTypeTestCase(unittest.TestCase):
    """Tests for the enum_type argparse converter factory."""

    def test_enum_type(self):
        class FooEnum(StrEnum):
            ALL = "all"
            NONE = "none"

        convert = enum_type(FooEnum)

        # Raw string values are converted to their enum members.
        self.assertEqual(convert("all"), FooEnum.ALL)
        self.assertEqual(convert("none"), FooEnum.NONE)

        # Members already of the enum type pass through unchanged.
        self.assertEqual(convert(FooEnum.ALL), FooEnum.ALL)
        self.assertEqual(convert(FooEnum.NONE), FooEnum.NONE)

    def test_enum_type_error(self):
        class FooEnum(StrEnum):
            ALL = "all"
            NONE = "none"

        convert = enum_type(FooEnum)

        # Unknown values raise the argparse-friendly ArgumentTypeError.
        with self.assertRaisesRegex(
            ArgumentTypeError,
            r"invalid value foo. Expected one of all, none",
        ):
            convert("foo")
pontos-25.3.2/tests/test_helper.py000066400000000000000000000470071476255566300172120ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=redefined-builtin,disallowed-name
# ruff: noqa: E501

import unittest
from enum import Enum
from pathlib import Path
from unittest.mock import MagicMock, call, patch

import httpx

from pontos.errors import PontosError
from pontos.helper import (
    DEFAULT_TIMEOUT,
    AsyncDownloadProgressIterable,
    DownloadProgressIterable,
    add_sys_path,
    deprecated,
    download,
    download_async,
    ensure_unload_module,
    enum_or_value,
    parse_timedelta,
    snake_case,
    unload_module,
)
from pontos.testing import temp_file, temp_python_module
from tests import (
    AsyncIteratorMock,
    AsyncMock,
    IsolatedAsyncioTestCase,
    aiter,
    anext,
)
from tests.github.api import create_response


class AsyncDownloadProgressIterableTestCase(IsolatedAsyncioTestCase):
    """Tests for AsyncDownloadProgressIterable's (content, progress) pairs."""

    def test_properties(self):
        content_iterator = AsyncIteratorMock(["1", "2"])

        download_iterable = AsyncDownloadProgressIterable(
            content_iterator=content_iterator, length=2, url="https://foo.bar"
        )

        self.assertEqual(download_iterable.length, 2)
        self.assertEqual(download_iterable.url, "https://foo.bar")

    async def test_download_progress(self):
        content_iterator = AsyncIteratorMock(["1", "2"])

        download_iterable = AsyncDownloadProgressIterable(
            content_iterator=content_iterator, length=2, url="https://foo.bar"
        )

        # with a known length, progress is reported as a percentage
        it = aiter(download_iterable)
        content, progress = await anext(it)

        self.assertEqual(content, "1")
        self.assertEqual(progress, 50)

        content, progress = await anext(it)
        self.assertEqual(content, "2")
        self.assertEqual(progress, 100)

    async def test_download_progress_without_length(self):
        content_iterator = AsyncIteratorMock(["1", "2"])

        download_iterable = AsyncDownloadProgressIterable(
            content_iterator=content_iterator,
            length=None,
            url="https://foo.bar",
        )

        # without a total length no progress percentage can be computed
        it = aiter(download_iterable)
        content, progress = await anext(it)

        self.assertEqual(content, "1")
        self.assertIsNone(progress)

        content, progress = await anext(it)
        self.assertEqual(content, "2")
        self.assertIsNone(progress)


class DownloadAsyncTestCase(IsolatedAsyncioTestCase):
    """Tests for the download_async context manager using mocked streams."""

    async def test_download_async(self):
        response = create_response(url="http://some.url")
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        stream = AsyncMock()
        stream.__aenter__.return_value = response

        # explicit content_length drives the progress percentage
        async with download_async(
            stream, content_length=2
        ) as download_iterable:
            self.assertEqual(download_iterable.url, "http://some.url")

            it = aiter(download_iterable)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

    async def test_download_async_content_length(self):
        response = create_response(headers=MagicMock(), url="http://some.url")
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        # no explicit length: it is taken from the content-length header
        response.headers.get.return_value = 2
        stream = AsyncMock()
        stream.__aenter__.return_value = response

        async with download_async(stream) as download_iterable:
            self.assertEqual(download_iterable.url, "http://some.url")

            it = aiter(download_iterable)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

    async def test_download_async_url(self):
        response = create_response(url="http://some.url")
        response.aiter_bytes.return_value = AsyncIteratorMock(["1", "2"])
        stream = AsyncMock()
        stream.__aenter__.return_value = response

        # an explicitly passed url overrides the response's url
        async with download_async(
            stream,
            url="http://foo.bar",
            content_length=2,
        ) as download_iterable:
            self.assertEqual(download_iterable.url, "http://foo.bar")

            it = aiter(download_iterable)
            content, progress = await anext(it)

            self.assertEqual(content, "1")
            self.assertEqual(progress, 50)

            content, progress = await anext(it)
            self.assertEqual(content, "2")
            self.assertEqual(progress, 100)

    async def test_download_async_failure(self):
        # HTTP errors raised by raise_for_status propagate to the caller
        response = create_response()
        response.raise_for_status.side_effect = httpx.HTTPStatusError(
            "404", request=MagicMock(), response=response
        )
        stream = AsyncMock()
        stream.__aenter__.return_value = response

        with self.assertRaises(httpx.HTTPStatusError):
            async with download_async(stream, content_length=2):
                pass


class DownloadProgressIterableTestCase(unittest.TestCase):
    """Tests for the synchronous DownloadProgressIterable.

    Iterating it writes each chunk to the destination and yields a
    progress fraction (or None when the total length is unknown).
    """

    def test_properties(self):
        content = ["foo", "bar", "baz"]
        destination = Path("foo")
        length = 123
        url = "bar"
        download_progress = DownloadProgressIterable(
            content_iterator=content,
            destination=destination,
            length=length,
            url=url,
        )

        self.assertEqual(download_progress.url, url)
        self.assertEqual(download_progress.length, length)
        self.assertEqual(download_progress.destination, destination)

    def test_progress_without_length(self):
        content = ["foo", "bar", "baz"]
        # destination.open() must yield a context manager wrapping the writer
        destination = MagicMock()
        writer = MagicMock()
        context_manager = MagicMock()
        context_manager.__enter__.return_value = writer
        destination.open.return_value = context_manager
        download_progress = DownloadProgressIterable(
            content_iterator=content,
            destination=destination,
            length=None,
            url="foo",
        )

        self.assertEqual(download_progress.url, "foo")

        # chunks are still written, but progress stays None without a length
        it = iter(download_progress)
        progress = next(it)
        writer.write.assert_called_with("foo")
        self.assertIsNone(progress)
        progress = next(it)
        writer.write.assert_called_with("bar")
        self.assertIsNone(progress)
        progress = next(it)
        writer.write.assert_called_with("baz")
        self.assertIsNone(progress)

        with self.assertRaises(StopIteration):
            next(it)

    def test_progress(self):
        content = ["foo", "bar", "baz"]
        destination = MagicMock()
        writer = MagicMock()
        context_manager = MagicMock()
        context_manager.__enter__.return_value = writer
        destination.open.return_value = context_manager
        download_progress = DownloadProgressIterable(
            content_iterator=content,
            destination=destination,
            length=9,
            url="foo",
        )

        # length 9 with 3-byte chunks yields 1/3, 2/3, 1 as progress
        it = iter(download_progress)
        progress = next(it)
        writer.write.assert_called_with("foo")
        self.assertEqual(progress, 1 / 3.0)
        progress = next(it)
        writer.write.assert_called_with("bar")
        self.assertEqual(progress, 2 / 3.0)
        progress = next(it)
        writer.write.assert_called_with("baz")
        self.assertEqual(progress, 1)

        with self.assertRaises(StopIteration):
            next(it)

    def test_run(self):
        content = ["foo", "bar", "baz"]
        destination = MagicMock()
        writer = MagicMock()
        context_manager = MagicMock()
        context_manager.__enter__.return_value = writer
        destination.open.return_value = context_manager
        download_progress = DownloadProgressIterable(
            content_iterator=content,
            destination=destination,
            length=9,
            url="foo",
        )

        # run() consumes the whole iterator and writes all chunks at once
        download_progress.run()
        destination.open.assert_called_once()
        writer.write.assert_has_calls((call("foo"), call("bar"), call("baz")))


class DownloadTestCase(unittest.TestCase):
    """Tests for the download() helper with a patched httpx.stream."""

    @patch("pontos.github.api.api.httpx.stream")
    def test_download_without_destination(
        self,
        requests_mock: MagicMock,
    ):
        response = MagicMock()
        response.iter_bytes.return_value = [b"foo", b"bar", b"baz"]
        response_headers = MagicMock()
        response.headers = response_headers
        # no content-length header -> progress values are None
        response_headers.get.return_value = None
        response_stream = MagicMock()
        response_stream.__enter__.return_value = response
        requests_mock.return_value = response_stream

        with download(
            "https://github.com/greenbone/pontos/archive/refs/tags/v21.11.0.tar.gz"  # pylint: disable=line-too-long
        ) as download_progress:
            requests_mock.assert_called_once_with(
                "GET",
                "https://github.com/greenbone/pontos/archive/refs/tags/v21.11.0.tar.gz",  # pylint: disable=line-too-long
                follow_redirects=True,
                timeout=DEFAULT_TIMEOUT,
                headers=None,
                params=None,
            )
            response_headers.get.assert_called_once_with("content-length")

            self.assertIsNone(download_progress.length)
            # without an explicit destination, the URL's basename is used
            self.assertEqual(
                download_progress.destination, Path("v21.11.0.tar.gz")
            )

            it = iter(download_progress)
            progress = next(it)
            self.assertIsNone(progress)
            progress = next(it)
            self.assertIsNone(progress)
            progress = next(it)
            self.assertIsNone(progress)

            with self.assertRaises(StopIteration):
                next(it)

            # remove the file actually created in the working directory
            download_progress.destination.unlink()

    @patch("pontos.helper.Path")
    @patch("pontos.github.api.api.httpx.stream")
    def test_download_with_content_length(
        self, requests_mock: MagicMock, path_mock: MagicMock
    ):
        response = MagicMock()
        response.iter_bytes.return_value = [b"foo", b"bar", b"baz"]
        response_headers = MagicMock()
        response.headers = response_headers
        # content-length header of 9 bytes enables fractional progress
        response_headers.get.return_value = "9"
        response_stream = MagicMock()
        response_stream.__enter__.return_value = response
        requests_mock.return_value = response_stream

        download_file = path_mock()
        file_mock = MagicMock()
        file_mock.__enter__.return_value = file_mock
        download_file.open.return_value = file_mock

        with download(
            "https://github.com/greenbone/pontos/archive/refs/tags/v21.11.0.tar.gz",  # pylint: disable=line-too-long
            download_file,
        ) as download_progress:
            requests_mock.assert_called_once_with(
                "GET",
                "https://github.com/greenbone/pontos/archive/refs/tags/v21.11.0.tar.gz",  # pylint: disable=line-too-long
                timeout=DEFAULT_TIMEOUT,
                follow_redirects=True,
                headers=None,
                params=None,
            )
            response_headers.get.assert_called_once_with("content-length")

            self.assertEqual(download_progress.length, 9)

            it = iter(download_progress)

            progress = next(it)
            self.assertEqual(progress, 1 / 3)
            file_mock.write.assert_called_with(b"foo")

            progress = next(it)
            self.assertEqual(progress, 2 / 3)
            file_mock.write.assert_called_with(b"bar")

            progress = next(it)
            self.assertEqual(progress, 1)
            file_mock.write.assert_called_with(b"baz")

            with self.assertRaises(StopIteration):
                next(it)


class DeprecatedTestCase(unittest.TestCase):
    """Tests for the @deprecated decorator on functions, classes and methods.

    The decorator is exercised in both bare (@deprecated) and called
    (@deprecated(...)) forms, with optional ``since`` and ``reason``
    arguments reflected in the emitted DeprecationWarning message.
    """

    def test_function(self):
        @deprecated
        def foo():
            pass

        with self.assertWarnsRegex(DeprecationWarning, "foo is deprecated"):
            foo()

        @deprecated()
        def foo2():
            pass

        with self.assertWarnsRegex(DeprecationWarning, "foo2 is deprecated"):
            foo2()

    def test_function_with_since(self):
        @deprecated(since="1.2.3")
        def foo():
            pass

        with self.assertWarnsRegex(
            DeprecationWarning, "deprecated since version 1.2.3"
        ):
            foo()

    def test_function_with_reason(self):
        # reason may be passed positionally or as a keyword argument
        @deprecated("Because it is obsolete.")
        def foo():
            pass

        with self.assertWarnsRegex(
            DeprecationWarning, "Because it is obsolete"
        ):
            foo()

        @deprecated(reason="Because it is obsolete.")
        def foo2():
            pass

        with self.assertWarnsRegex(
            DeprecationWarning, "Because it is obsolete"
        ):
            foo2()

    def test_class(self):
        # the warning is emitted on instantiation of a deprecated class
        @deprecated
        class Foo:
            pass

        with self.assertWarnsRegex(DeprecationWarning, "Foo is deprecated"):
            Foo()

        @deprecated()
        class Foo2:
            pass

        with self.assertWarnsRegex(DeprecationWarning, "Foo2 is deprecated"):
            Foo2()

    def test_class_with_since(self):
        @deprecated(since="1.2.3")
        class Foo:
            pass

        with self.assertWarnsRegex(
            DeprecationWarning, "deprecated since version 1.2.3"
        ):
            Foo()

    def test_class_with_reason(self):
        @deprecated("Because it is obsolete.")
        class Foo:
            pass

        with self.assertWarnsRegex(
            DeprecationWarning, "Because it is obsolete"
        ):
            Foo()

        @deprecated(reason="Because it is obsolete.")
        class Foo2:
            pass

        with self.assertWarnsRegex(
            DeprecationWarning, "Because it is obsolete"
        ):
            Foo2()

    def test_method(self):
        # the warning is emitted when the deprecated method is called
        class Foo:
            @deprecated
            def bar(self):
                pass

        with self.assertWarnsRegex(DeprecationWarning, "bar is deprecated"):
            Foo().bar()

        class Foo2:
            @deprecated
            def bar(self):
                pass

        with self.assertWarnsRegex(DeprecationWarning, "bar is deprecated"):
            Foo2().bar()

    def test_method_with_since(self):
        class Foo:
            @deprecated(since="1.2.3")
            def bar(self):
                pass

        with self.assertWarnsRegex(
            DeprecationWarning, "deprecated since version 1.2.3"
        ):
            Foo().bar()

    def test_method_with_reason(self):
        class Foo:
            @deprecated("Because it is obsolete.")
            def bar(self):
                pass

        with self.assertWarnsRegex(
            DeprecationWarning, "Because it is obsolete"
        ):
            Foo().bar()

        class Foo2:
            @deprecated(reason="Because it is obsolete.")
            def bar(self):
                pass

        with self.assertWarnsRegex(
            DeprecationWarning, "Because it is obsolete"
        ):
            Foo2().bar()


class AddSysPathTestCase(unittest.TestCase):
    """Tests for the add_sys_path context manager."""

    def test_add_sys_path(self):
        # before extending sys.path the module must not be importable
        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule  # noqa: F811,F401

        # with the temp directory on sys.path the import succeeds
        with (
            temp_file("", name="mymodule.py") as module_path,
            add_sys_path(module_path.parent),
        ):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule  # noqa: F811,F401

        # drop the cached module so later tests see a clean state
        unload_module("mymodule")


class EnsureUnloadModuleTestCase(unittest.TestCase):
    """Tests for ensure_unload_module's cleanup of imported temp modules."""

    def test_ensure_unload_module(self):
        with (
            temp_python_module("def foo():\n  pass", name="bar"),
            ensure_unload_module("bar"),
        ):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import bar  # noqa: F401,F811

        # after the context exits the module must be gone again
        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import bar  # noqa: F401,F811

    def test_ensure_unload_module_exception(self):
        # the module is unloaded even when the with-body raises
        with self.assertRaisesRegex(ValueError, "Ipsum"):
            with (
                temp_python_module(
                    "def func():\n  raise ValueError('Ipsum')", name="bar"
                ),
                ensure_unload_module("bar"),
            ):
                # pylint: disable=import-error,import-outside-toplevel,unused-import
                import bar

                bar.func()

        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import bar

    def test_add_sys_path_exception(self):
        # NOTE(review): this exercises add_sys_path cleanup on error and
        # arguably belongs in AddSysPathTestCase — consider moving it.
        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule  # noqa: F811,F401

        try:
            with (
                temp_file("", name="mymodule.py") as module_path,
                add_sys_path(module_path.parent),
            ):
                # pylint: disable=import-error,import-outside-toplevel,unused-import
                import mymodule  # noqa: F811,F401

                raise ValueError()
        except ValueError:
            pass
        finally:
            unload_module("mymodule")

        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule  # noqa: F401,F811


class SnakeCaseTestCase(unittest.TestCase):
    """Tests for the snake_case string converter."""

    def test_snake_case(self):
        # camelCase/CamelCase are converted; snake_case input is unchanged
        conversions = {
            "CamelCase": "camel_case",
            "camelCase": "camel_case",
            "snakecase": "snakecase",
            "snake_case": "snake_case",
        }
        for given, expected in conversions.items():
            self.assertEqual(snake_case(given), expected)


class EnumOrValueTestCase(unittest.TestCase):
    """Tests for enum_or_value, which unwraps enum members to their value."""

    def test_value(self):
        # non-enum arguments are returned unchanged (including None)
        for plain in (None, "foo", 123):
            self.assertEqual(enum_or_value(plain), plain)

    def test_enum(self):
        class Foo(Enum):
            BAR = "bar"
            BAZ = "baz"

        # enum members are reduced to their underlying value
        self.assertEqual(enum_or_value(Foo.BAR), "bar")
        self.assertEqual(enum_or_value(Foo.BAZ), "baz")


class ParseTimedeltaTestCase(unittest.TestCase):
    """Tests for parse_timedelta, a "1w2d3h4m5s" style duration parser."""

    def test_parse_complex(self):
        td = parse_timedelta("1w2d4h5m6s")

        # 1w + 2d == 9 days; 4h5m6s == 14706 seconds
        self.assertEqual(td.days, 9)
        self.assertEqual(td.seconds, 14706)

    def test_parse_weeks(self):
        td = parse_timedelta("1w")
        self.assertEqual(td.days, 7)
        self.assertEqual(td.seconds, 0)

        # fractional weeks are supported: 1.5w == 10 days 12 hours
        td = parse_timedelta("1.5w")
        self.assertEqual(td.days, 10)
        self.assertEqual(td.seconds, 43200)

    def test_parse_days(self):
        td = parse_timedelta("1d")
        self.assertEqual(td.days, 1)
        self.assertEqual(td.seconds, 0)

        # 1.5d == 1 day plus 12 hours
        td = parse_timedelta("1.5d")
        self.assertEqual(td.days, 1)
        self.assertEqual(td.seconds, 43200)

    def test_parse_hours(self):
        td = parse_timedelta("1h")
        self.assertEqual(td.days, 0)
        self.assertEqual(td.seconds, 3600)

        td = parse_timedelta("1.5h")
        self.assertEqual(td.days, 0)
        self.assertEqual(td.seconds, 5400)

    def test_parse_error(self):
        # unknown units and comma decimals must be rejected
        for invalid in ("foo", "1d2x", "1,2d"):
            with self.assertRaises(PontosError):
                parse_timedelta(invalid)
pontos-25.3.2/tests/test_pontos.py000066400000000000000000000020221476255566300172410ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from unittest.mock import patch

from pontos import main
from pontos.version import __version__


class TestPontos(unittest.TestCase):
    """Tests for the pontos CLI entry point with a mocked terminal."""

    @patch("pontos.pontos.RichTerminal")
    def test_pontos(self, terminal_mock):
        # without arguments the CLI prints an overview of available commands
        main()

        terminal_mock.return_value.print.assert_called()
        terminal_mock.return_value.indent.assert_called()
        terminal_mock.return_value.bold_info.assert_called()
        terminal_mock.return_value.info.assert_called()
        terminal_mock.return_value.warning.assert_called_once_with(
            'Use the listed commands "help" for more information '
            "and arguments description."
        )

    @patch("pontos.pontos.RichTerminal")
    @patch("sys.argv", ["pontos", "--version"])
    def test_pontos_version(self, terminal_mock):
        # --version prints only the version string, nothing else
        main()

        terminal_mock.return_value.print.assert_called_once_with(
            f"pontos version {__version__}"
        )
pontos-25.3.2/tests/testing/000077500000000000000000000000001476255566300157675ustar00rootroot00000000000000pontos-25.3.2/tests/testing/__init__.py000066400000000000000000000001411476255566300200740ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/testing/test_testing.py000066400000000000000000000170051476255566300210600ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import struct
import unittest
from pathlib import Path

from pontos.git import Git
from pontos.helper import unload_module
from pontos.testing import (
    temp_directory,
    temp_file,
    temp_git_repository,
    temp_python_module,
)


class TempDirectoryTestCase(unittest.TestCase):
    """Tests for the temp_directory context manager.

    Verifies that the directory (and files created within it) exists
    inside the context and is removed afterwards — also on exceptions —
    and that the ``change_into`` and ``add_to_sys_path`` options work.
    """

    def test_temp_directory(self):
        with temp_directory() as tmp_dir:
            tmp_file = tmp_dir / "test.txt"
            tmp_file.write_text("Lorem Ipsum", encoding="utf8")

            self.assertTrue(tmp_dir.exists())
            self.assertTrue(tmp_file.exists())

        # directory and its contents are removed on normal exit
        self.assertFalse(tmp_dir.exists())
        self.assertFalse(tmp_file.exists())

    def test_temp_directory_exception(self):
        # cleanup must also happen when the with-body raises
        try:
            with temp_directory() as tmp_dir:
                tmp_file = tmp_dir / "test.txt"
                tmp_file.write_text("Lorem Ipsum", encoding="utf8")

                self.assertTrue(tmp_dir.exists())
                self.assertTrue(tmp_file.exists())
                raise ValueError()
        except ValueError:
            pass
        finally:
            self.assertFalse(tmp_dir.exists())
            self.assertFalse(tmp_file.exists())

    def test_temp_directory_change_into(self):
        old_cwd = Path.cwd()
        with temp_directory(change_into=True) as tmp_dir:
            new_cwd = Path.cwd()

            # cwd is switched to the (resolved) temporary directory
            self.assertEqual(new_cwd, tmp_dir.resolve())
            self.assertNotEqual(old_cwd, new_cwd)

    def test_temp_directory_add_to_sys_path(self):
        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule2  # noqa: F401,F811

        with temp_directory(add_to_sys_path=True) as module_path:
            mymodule_file = module_path / "mymodule2.py"
            mymodule_file.touch()

            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule2  # noqa: F401,F811

        # Fix: unload the module that was actually imported ("mymodule2",
        # not "mymodule") so it does not linger in sys.modules and leak
        # into other tests.
        unload_module("mymodule2")


class TempGitRepositoryTestCase(unittest.TestCase):
    """Tests for the temp_git_repository context manager."""

    def test_temp_git_repository(self):
        """The repository is usable inside the context and removed after."""
        with temp_git_repository(branch="foo") as repo_dir:
            sample = repo_dir / "test.txt"
            sample.write_text("Lorem Ipsum", encoding="utf8")

            self.assertTrue(repo_dir.exists())
            self.assertTrue(sample.exists())

            Git(repo_dir).add(sample)

        self.assertFalse(repo_dir.exists())
        self.assertFalse(sample.exists())

    def test_temp_git_repository_exception(self):
        """Cleanup also happens when an exception escapes the block."""
        with self.assertRaises(ValueError):
            with temp_git_repository(branch="foo") as repo_dir:
                sample = repo_dir / "test.txt"
                sample.write_text("Lorem Ipsum", encoding="utf8")

                self.assertTrue(repo_dir.exists())
                self.assertTrue(sample.exists())

                Git(repo_dir).add(sample)

                raise ValueError()

        self.assertFalse(repo_dir.exists())
        self.assertFalse(sample.exists())


class TempFileTestCase(unittest.TestCase):
    """Tests for the temp_file context manager."""

    def test_temp_file(self):
        """Text content is written and the file is removed afterwards."""
        with temp_file("my content") as path:
            self.assertTrue(path.exists())
            self.assertEqual("my content", path.read_text(encoding="utf8"))

        self.assertFalse(path.exists())

    def test_temp_binary_file(self):
        """Binary content is written verbatim."""
        payload = struct.pack(">if", 42, 2.71828182846)

        with temp_file(payload) as path:
            self.assertTrue(path.exists())
            self.assertEqual(payload, path.read_bytes())

        self.assertFalse(path.exists())

    def test_temp_file_without_content(self):
        """Without content an empty file with the given name is created."""
        with temp_file(name="foo.bar") as path:
            self.assertTrue(path.exists())
            self.assertTrue(path.is_file())
            self.assertEqual("", path.read_text(encoding="utf8"))

        self.assertFalse(path.exists())

    def test_temp_file_exception(self):
        """The file is removed even when the block raises."""
        with self.assertRaises(ValueError):
            with temp_file("my content") as path:
                self.assertTrue(path.exists())
                self.assertEqual(
                    "my content", path.read_text(encoding="utf8")
                )

                raise ValueError()

        self.assertFalse(path.exists())

    def test_temp_file_name(self):
        """A given file name is used for the temporary file."""
        with temp_file("my content", name="foo.txt") as path:
            self.assertTrue(path.exists())
            self.assertEqual(path.name, "foo.txt")
            self.assertEqual("my content", path.read_text(encoding="utf8"))

        self.assertFalse(path.exists())

    def test_temp_file_change_into(self):
        """change_into=True makes the file's parent the working directory."""
        cwd_before = Path.cwd()
        with temp_file("my content", change_into=True) as path:
            cwd_inside = Path.cwd()

            self.assertTrue(path.exists())
            self.assertEqual("my content", path.read_text(encoding="utf8"))

            self.assertEqual(cwd_inside, path.parent.resolve())
            self.assertNotEqual(cwd_before, cwd_inside)

        self.assertFalse(path.exists())


class TempPythonModuleTestCase(unittest.TestCase):
    """Tests for the temp_python_module context manager.

    These tests deliberately import the temporary module in several
    scopes; behavior depends on the interpreter's import caching, so the
    statement order must not be changed.
    """

    def test_temp_python_module(self):
        """The module is importable inside the context and gone afterwards."""
        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import foo  # noqa: F401,F811

        with temp_python_module("def foo():\n  pass") as module_path:
            self.assertTrue(module_path.exists())

            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import foo  # noqa: F401,F811

        self.assertFalse(module_path.exists())

        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import foo  # noqa: F401,F811

    def test_temp_python_module_exception(self):
        """Module file and importability are cleaned up on exceptions."""
        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import foo  # noqa: F401,F811

        try:
            with temp_python_module("def foo():\n  pass") as module_path:
                self.assertTrue(module_path.exists())

                # pylint: disable=import-error,import-outside-toplevel,unused-import # noqa: E501
                import foo  # noqa: F401,F811

                raise ValueError()
        except ValueError:
            pass
        finally:
            self.assertFalse(module_path.exists())

        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import foo  # noqa: F401,F811

    def test_temp_python_module_name(self):
        """A custom module name is honored for the temporary module."""
        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule3  # noqa: F401,F811

        with temp_python_module(
            "def foo():\n  pass", name="mymodule3"
        ) as module_path:
            self.assertTrue(module_path.exists())

            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule3  # noqa: F401,F811

        self.assertFalse(module_path.exists())

        with self.assertRaises(ImportError):
            # pylint: disable=import-error,import-outside-toplevel,unused-import
            import mymodule3  # noqa: F401,F811
pontos-25.3.2/tests/updateheader/000077500000000000000000000000001476255566300167455ustar00rootroot00000000000000pontos-25.3.2/tests/updateheader/__init__.py000066400000000000000000000001411476255566300210520ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/updateheader/test_header.py000066400000000000000000000522071476255566300216140ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import datetime
import struct
from argparse import Namespace
from contextlib import redirect_stdout
from io import StringIO
from pathlib import Path
from unittest import TestCase
from unittest.mock import MagicMock, patch

from pontos.errors import PontosError
from pontos.testing import temp_directory, temp_file
from pontos.updateheader.updateheader import _add_header as add_header
from pontos.updateheader.updateheader import (
    _compile_copyright_regex,
    main,
    parse_args,
    update_file,
)
from pontos.updateheader.updateheader import (
    _compile_outdated_regex as compile_outdated_regex,
)
from pontos.updateheader.updateheader import _find_copyright as find_copyright
from pontos.updateheader.updateheader import (
    _get_exclude_list as get_exclude_list,
)
from pontos.updateheader.updateheader import (
    _get_modified_year as get_modified_year,
)
from pontos.updateheader.updateheader import (
    _remove_outdated_lines as remove_outdated_lines,
)

# Expected SPDX license header template; {date} is filled in by the tests.
HEADER = """# SPDX-FileCopyrightText: {date} Greenbone AG
#
# SPDX-License-Identifier: AGPL-3.0-or-later"""


class GetModifiedYearTestCase(TestCase):
    """Tests for _get_modified_year."""

    @patch("pontos.updateheader.updateheader.Git")
    def test_get_modified_year(self, git_mock):
        """The year is taken from the git log of the file."""
        git_mock.return_value.log.return_value = ["2020"]

        with temp_file(name="test.py", change_into=True) as test_file:
            self.assertEqual(get_modified_year(f=test_file), "2020")

    def test_get_modified_year_error(self):
        """A file without git history raises a PontosError."""
        with (
            temp_directory(change_into=True) as temp_dir,
            self.assertRaises(PontosError),
        ):
            get_modified_year(f=temp_dir / "test.py")


class FindCopyRightTestCase(TestCase):
    """Tests for _find_copyright with classic and SPDX style headers.

    The previous version duplicated the same three match/assert groups in
    both test methods; the shared logic lives in two private helpers now.
    """

    def setUp(self) -> None:
        self.company = "Greenbone AG"
        self.regex = _compile_copyright_regex()

    def _assert_copyright(self, line, creation_year, modification_year):
        """Assert that *line* matches and carries the expected years."""
        found, match = find_copyright(copyright_regex=self.regex, line=line)

        self.assertTrue(found)
        self.assertIsNotNone(match)
        self.assertEqual(match.creation_year, creation_year)
        if modification_year is None:
            self.assertIsNone(match.modification_year)
        else:
            self.assertEqual(match.modification_year, modification_year)
        self.assertEqual(match.company, self.company)

    def _assert_no_copyright(self, line):
        """Assert that *line* does not match the copyright regex."""
        found, match = find_copyright(copyright_regex=self.regex, line=line)

        self.assertFalse(found)
        self.assertIsNone(match)

    def test_find_copyright(self):
        # Full match
        self._assert_copyright(
            "# Copyright (C) 1995-2021 Greenbone AG", "1995", "2021"
        )
        # No modification Date
        self._assert_copyright(
            "# Copyright (C) 1995 Greenbone AG", "1995", None
        )
        # No match
        self._assert_no_copyright(
            "# This program is free software: "
            "you can redistribute it and/or modify"
        )

    def test_find_spdx_copyright(self):
        # Full match
        self._assert_copyright(
            "# SPDX-FileCopyrightText: 1995-2021 Greenbone AG", "1995", "2021"
        )
        # No modification Date
        self._assert_copyright(
            "# SPDX-FileCopyrightText: 1995 Greenbone AG", "1995", None
        )
        # No match
        self._assert_no_copyright(
            "# This program is free software: "
            "you can redistribute it and/or modify"
        )


class AddHeaderTestCase(TestCase):
    """Tests for _add_header."""

    def setUp(self):
        self.company = "Greenbone AG"

    def test_add_header(self):
        """A python file gets the expected SPDX header for the given year."""
        result = add_header(
            suffix=".py",
            license_id="AGPL-3.0-or-later",
            company=self.company,
            year="2021",
        )

        self.assertEqual(result, HEADER.format(date="2021") + "\n")

    def test_add_header_wrong_file_suffix(self):
        """An unknown file suffix raises a ValueError."""
        with self.assertRaises(ValueError):
            add_header(
                suffix=".prr",
                license_id="AGPL-3.0-or-later",
                company=self.company,
                year="2021",
            )

    def test_add_header_license_not_found(self):
        """An unknown license id raises a FileNotFoundError."""
        with self.assertRaises(FileNotFoundError):
            add_header(
                suffix=".py",
                license_id="AAAGPL-3.0-or-later",
                company=self.company,
                year="2021",
            )


class UpdateFileTestCase(TestCase):
    """Tests for update_file covering error handling, header creation,
    header updates and cleanup of outdated license boilerplate."""

    # show full diffs for the multi-line file content comparisons below
    maxDiff = None

    def setUp(self):
        self.company = "Greenbone AG"

        self.path = Path(__file__).parent

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_file_not_existing(self, mock_stdout):
        """A missing file raises FileNotFoundError and reports it on stdout."""
        year = "2020"
        license_id = "AGPL-3.0-or-later"

        with temp_directory(change_into=True) as temp_dir:
            test_file = temp_dir / "test.py"

            with self.assertRaises(FileNotFoundError):
                update_file(
                    test_file,
                    year,
                    license_id,
                    self.company,
                )

            ret = mock_stdout.getvalue()
            self.assertEqual(
                ret,
                f"{test_file}: File is not existing.\n",
            )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_file_wrong_license(self, mock_stdout):
        """An unknown license id is reported on stdout without raising."""
        year = "2020"
        license_id = "AAAGPL-3.0-or-later"

        with temp_file(name="test.py", change_into=True) as test_file:
            update_file(
                test_file,
                year,
                license_id,
                self.company,
            )

            ret = mock_stdout.getvalue()
            self.assertEqual(
                ret,
                f"{test_file}: License file for "
                "AAAGPL-3.0-or-later is not existing.\n",
            )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_file_suffix_invalid(self, mock_stdout):
        """An unsupported file suffix is reported on stdout."""
        year = "2020"
        license_id = "AGPL-3.0-or-later"

        with temp_file(name="test.pppy", change_into=True) as test_file:
            update_file(
                test_file,
                year,
                license_id,
                self.company,
            )

            ret = mock_stdout.getvalue()
            self.assertEqual(
                ret,
                f"{test_file}: No license header for the format .pppy found.\n",
            )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_file_binary_file(self, mock_stdout):
        """A binary file is reported as ignored and raises UnicodeDecodeError."""
        year = "2020"
        license_id = "AGPL-3.0-or-later"

        # create a Binary file ...
        # https://stackoverflow.com/a/30148554
        data = struct.pack(">if", 42, 2.71828182846)

        with (
            temp_file(name="test.py", content=data) as test_file,
            self.assertRaises(UnicodeDecodeError),
        ):
            update_file(
                test_file,
                year,
                license_id,
                self.company,
            )

        ret = mock_stdout.getvalue()
        self.assertEqual(
            ret,
            f"{test_file}: Ignoring binary file.\n",
        )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_create_header(self, mock_stdout):
        """A file without header gets a freshly created license header."""
        year = "1995"
        license_id = "AGPL-3.0-or-later"

        expected_header = HEADER.format(date="1995") + "\n\n"

        with temp_file(name="test.py", change_into=True) as test_file:
            update_file(
                test_file,
                year,
                license_id,
                self.company,
            )

            ret = mock_stdout.getvalue()
            self.assertEqual(
                f"{test_file}: Added license header.\n",
                ret,
            )
            self.assertEqual(
                expected_header, test_file.read_text(encoding="utf-8")
            )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_create_header_single_year(self, mock_stdout):
        """single_year=True creates a header with only one year."""
        year = "1995"
        license_id = "AGPL-3.0-or-later"

        expected_header = HEADER.format(date="1995") + "\n\n"

        with temp_file(name="test.py", change_into=True) as test_file:
            update_file(
                test_file, year, license_id, self.company, single_year=True
            )
            ret = mock_stdout.getvalue()
            self.assertEqual(
                f"{test_file}: Added license header.\n",
                ret,
            )
            self.assertEqual(
                expected_header, test_file.read_text(encoding="utf-8")
            )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_header_in_file(self, mock_stdout):
        """An existing header is extended to a year range."""
        year = "2021"
        license_id = "AGPL-3.0-or-later"

        header = HEADER.format(date="2020")
        with temp_file(
            content=header, name="test.py", change_into=True
        ) as test_file:
            update_file(
                test_file,
                year,
                license_id,
                self.company,
            )

            ret = mock_stdout.getvalue()
            self.assertEqual(
                ret,
                f"{test_file}: Changed License Header "
                "Copyright Year None -> 2021\n",
            )
            self.assertIn(
                "# SPDX-FileCopyrightText: 2020-2021 Greenbone AG",
                test_file.read_text(encoding="utf-8"),
            )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_header_in_file_single_year(self, mock_stdout):
        """single_year=True collapses an existing year range."""
        year = "2021"
        license_id = "AGPL-3.0-or-later"

        header = HEADER.format(date="2020-2021")
        with temp_file(
            content=header, name="test.py", change_into=True
        ) as test_file:
            update_file(
                test_file,
                year,
                license_id,
                self.company,
                single_year=True,
            )

            ret = mock_stdout.getvalue()
            self.assertEqual(
                ret,
                f"{test_file}: Changed License Header Copyright Year format to single year "
                "2020-2021 -> 2020\n",
            )

            self.assertIn(
                "# SPDX-FileCopyrightText: 2020 Greenbone AG",
                test_file.read_text(encoding="utf-8"),
            )

    @patch("sys.stdout", new_callable=StringIO)
    def test_update_header_ok_in_file(self, mock_stdout):
        """An up-to-date header is left unchanged."""
        year = "2021"
        license_id = "AGPL-3.0-or-later"

        header = HEADER.format(date="2021")
        with temp_file(
            content=header, name="test.py", change_into=True
        ) as test_file:
            update_file(
                test_file,
                year,
                license_id,
                self.company,
            )

            ret = mock_stdout.getvalue()
            self.assertEqual(
                ret,
                f"{test_file}: License Header is ok.\n",
            )
            self.assertIn(
                "# SPDX-FileCopyrightText: 2021 Greenbone AG",
                test_file.read_text(encoding="utf-8"),
            )

    def test_cleanup_file(self):
        """cleanup=True replaces an old GPL boilerplate header by SPDX."""
        test_content = """# Copyright (C) 2021-2022 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see .

import foo
import bar

foo.baz(bar.boing)
"""  # noqa: E501

        expected_content = f"""# SPDX-FileCopyrightText: 2021-{str(datetime.datetime.now().year)} Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import foo
import bar

foo.baz(bar.boing)
"""  # noqa: E501

        company = "Greenbone AG"
        year = str(datetime.datetime.now().year)
        license_id = "GPL-3.0-or-later"

        with temp_file(content=test_content, name="foo.py") as tmp:

            update_file(
                tmp,
                year,
                license_id,
                company,
                cleanup=True,
            )

            new_content = tmp.read_text(encoding="utf-8")
            self.assertEqual(expected_content, new_content)

    def test_cleanup_file_spdx_header(self):
        """cleanup=True keeps an SPDX header but updates the year range."""
        test_content = """
# SPDX-FileCopyrightText: 2021 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

import foo
import bar

foo.baz(bar.boing)
"""  # noqa: E501

        expected_content = f"""
# SPDX-FileCopyrightText: 2021-{str(datetime.datetime.now().year)} Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

import foo
import bar

foo.baz(bar.boing)
"""  # noqa: E501

        company = "Greenbone AG"
        year = str(datetime.datetime.now().year)
        license_id = "GPL-3.0-or-later"

        with temp_file(content=test_content, name="foo.py") as tmp:

            update_file(
                tmp,
                year,
                license_id,
                company,
                cleanup=True,
            )

            new_content = tmp.read_text(encoding="utf-8")
            self.assertEqual(expected_content, new_content)

    def test_cleanup_file_changed_company(self):
        """cleanup=True also rewrites the company in the SPDX header."""
        test_content = """
# SPDX-FileCopyrightText: 2021 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

import foo
import bar

foo.baz(bar.boing)
"""  # noqa: E501

        expected_content = f"""
# SPDX-FileCopyrightText: 2021-{str(datetime.datetime.now().year)} ACME Inc.
#
# SPDX-License-Identifier: GPL-3.0-or-later

import foo
import bar

foo.baz(bar.boing)
"""  # noqa: E501

        company = "ACME Inc."
        year = str(datetime.datetime.now().year)
        license_id = "GPL-3.0-or-later"

        with temp_file(content=test_content, name="foo.py") as tmp:

            update_file(
                tmp,
                year,
                license_id,
                company,
                cleanup=True,
            )

            new_content = tmp.read_text(encoding="utf-8")
            self.assertEqual(expected_content, new_content)


class ParseArgsTestCase(TestCase):
    """Tests for the pontos-update-header argument parser."""

    def test_argparser_files(self):
        """Short options for files, year and license are parsed."""
        parsed = parse_args(
            ["-f", "test.py", "-y", "2021", "-l", "AGPL-3.0-or-later"]
        )

        self.assertEqual(parsed.company, "Greenbone AG")
        self.assertEqual(parsed.files, ["test.py"])
        self.assertEqual(parsed.year, "2021")
        self.assertEqual(parsed.license_id, "AGPL-3.0-or-later")

    def test_argparser_dir(self):
        """Short options for directories and --changed are parsed."""
        parsed = parse_args(["-d", ".", "-c", "-l", "AGPL-3.0-or-later"])

        self.assertEqual(parsed.directories, ["."])
        self.assertEqual(parsed.company, "Greenbone AG")
        self.assertTrue(parsed.changed)
        self.assertEqual(parsed.year, str(datetime.datetime.now().year))
        self.assertEqual(parsed.license_id, "AGPL-3.0-or-later")

    def test_defaults(self):
        """Defaults are applied when only files are given."""
        parsed = parse_args(["-f", "foo.txt"])

        self.assertFalse(parsed.quiet)
        self.assertIsNone(parsed.log_file)
        self.assertFalse(parsed.changed)
        self.assertEqual(parsed.year, str(datetime.date.today().year))
        self.assertEqual(parsed.license_id, "GPL-3.0-or-later")
        self.assertEqual(parsed.company, "Greenbone AG")
        self.assertEqual(parsed.files, ["foo.txt"])
        self.assertIsNone(parsed.directories)
        self.assertIsNone(parsed.exclude_file)
        self.assertFalse(parsed.single_year)
        self.assertFalse(parsed.cleanup)

    def test_files_and_directories_mutual_exclusive(self):
        """--files and --directories together must abort argument parsing.

        The previous assertion was unreachable (it was placed after the
        raising call inside the assertRaises block) and accessed the
        non-existing attribute ``cm.msg``. argparse exits with status 2
        on mutually exclusive argument errors, which is asserted instead.
        """
        with self.assertRaises(SystemExit) as cm:
            parse_args(["--files", "foo", "--directories", "bar"])

        self.assertEqual(cm.exception.code, 2)


class GetExcludeListTestCase(TestCase):
    """Tests for _get_exclude_list."""

    def test_get_exclude_list(self):
        # Try to find the current file from two directories up...
        test_dirname = Path(__file__).parent.parent.parent
        # with a relative glob
        test_ignore_file = Path("ignore.file")
        test_ignore_file.write_text("*.py\n", encoding="utf-8")
        # remove the ignore file even when an assertion below fails;
        # previously a failing assertion leaked the file into the cwd
        self.addCleanup(test_ignore_file.unlink)

        exclude_list = get_exclude_list(
            test_ignore_file, [test_dirname.resolve()]
        )

        self.assertIn(Path(__file__), exclude_list)


class MainTestCase(TestCase):
    """Tests for the pontos-update-header main entry point."""

    def setUp(self) -> None:
        self.args = Namespace(company="Greenbone AG")

    def test_main(self):
        """main() runs through for an explicit file list."""
        with redirect_stdout(StringIO()):
            main(
                [
                    "--year",
                    "2021",
                    "--license",
                    "AGPL-3.0-or-later",
                    "--files",
                    "test.py",
                ]
            )

    @patch("sys.stdout", new_callable=StringIO)
    @patch("pontos.updateheader.updateheader.parse_args")
    def test_main_never_happen(self, argparser_mock, mock_stdout):
        """main() exits when neither files nor directories are given."""
        vars(self.args).update(
            year="2021",
            changed=False,
            license_id="AGPL-3.0-or-later",
            files=None,
            directories=None,
            verbose=0,
            log_file=None,
            quiet=False,
            cleanup=False,
            single_year=False,
        )
        argparser_mock.return_value = self.args

        with self.assertRaises(SystemExit):
            main()

        self.assertIn("Specify files to update!", mock_stdout.getvalue())

    def test_update_file_changed_no_git(self):
        """--changed falls back to the given year when git has no history."""
        with (
            redirect_stdout(StringIO()) as out,
            temp_directory(change_into=True) as temp_dir,
        ):
            test_file = temp_dir / "test.py"

            main(["--changed", "--year", "1999", "--files", str(test_file)])

            self.assertIn(
                "Could not get date of last modification via git, "
                f"using 1999 instead.{test_file}: File is not existing.",
                out.getvalue().replace("\n", ""),
            )


class RemoveOutdatedLinesTestCase(TestCase):
    """Tests for _remove_outdated_lines."""

    def setUp(self) -> None:
        self.compiled_regexes = compile_outdated_regex()

    def _cleaned(self, content):
        """Run the cleanup regexes over the given content."""
        return remove_outdated_lines(
            content=content, cleanup_regexes=self.compiled_regexes
        )

    def test_remove_outdated_lines(self):
        """Old GPL license prose in various comment styles is removed."""
        content = """* This program is free software: you can redistribute it and/or modify
*it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
//License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
# modify it under the terms of the GNU General Public License
# This program is free software; you can redistribute it and/or
# version 2 as published by the Free Software Foundation.
This program is free software: you can redistribute it and/or modify"""  # noqa: E501

        self.assertEqual("\n", self._cleaned(content))

    def test_remove_outdated_lines2(self):
        """Further boilerplate variants including the FSF address are removed."""
        content = """the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.
* GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program.  If not, see .
# -*- coding: utf-8 -*-
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA."""  # noqa: E501

        self.assertEqual("\n", self._cleaned(content))
pontos-25.3.2/tests/version/000077500000000000000000000000001476255566300157775ustar00rootroot00000000000000pontos-25.3.2/tests/version/__init__.py000066400000000000000000000006271476255566300201150ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import os
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator


@contextmanager
def use_cwd(path: Path) -> Iterator[None]:
    """
    Context Manager to change the current working directory temporarily

    The previous working directory is restored even if the managed block
    raises an exception (the original implementation skipped the restore
    on exceptions because the chdir back was not in a finally block).
    """
    current_cwd = Path.cwd()

    os.chdir(str(path))

    try:
        yield
    finally:
        # always change back, no matter how the block exits
        os.chdir(str(current_cwd))
pontos-25.3.2/tests/version/commands/000077500000000000000000000000001476255566300176005ustar00rootroot00000000000000pontos-25.3.2/tests/version/commands/__init__.py000066400000000000000000000001351476255566300217100ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/version/commands/test_cargo.py000066400000000000000000000075401476255566300223120ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later

import unittest
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator

import tomlkit

from pontos.testing import temp_directory, temp_file
from pontos.version import VersionError
from pontos.version.commands._cargo import CargoVersionCommand
from pontos.version.schemes import PEP440VersioningScheme

# Example Cargo.toml manifest for a single workspace member; the package
# name "nasl-syntax" is replaced per member in the tests below.
VERSION_EXAMPLE = """
[package]
name = "nasl-syntax"
version = "0.1.0"
edition = "2021"
license = "GPL-2.0-or-later"
"""
# Example top-level cargo workspace manifest listing all member crates.
WORKSPACE_EXAMPLE = """
[workspace]
members = [
  "nasl-syntax",
  "nasl-interpreter",
  "nasl-cli",
  "storage",
  "redis-storage",
  "json-storage",
  "feed",
  "feed-verifier",
]
"""


class VerifyCargoUpdateCommandTestCase(unittest.TestCase):
    """Tests for updating the version across a cargo workspace."""

    @contextmanager
    def __create_cargo_layout(self) -> Iterator[Path]:
        """Create a temporary cargo workspace with one member crate
        directory and Cargo.toml for each workspace member."""
        with temp_directory(change_into=True) as temporary_dir:
            workspace = temporary_dir / "Cargo.toml"
            workspace.write_text(WORKSPACE_EXAMPLE)
            members = tomlkit.parse(WORKSPACE_EXAMPLE)["workspace"]["members"]
            for member in members:
                member_dir = temporary_dir / member
                member_dir.mkdir()
                # each member gets the example manifest with its own name
                manifest = member_dir / "Cargo.toml"
                manifest.write_text(
                    VERSION_EXAMPLE.replace("nasl-syntax", member)
                )
            yield temporary_dir

    def test_update(self):
        """All member manifests are updated to the new version."""

        def expected_changed_files(temporary_dir_with_cargo_toml):
            members = tomlkit.parse(WORKSPACE_EXAMPLE)["workspace"]["members"]
            return [
                (temporary_dir_with_cargo_toml / m / "Cargo.toml").resolve()
                for m in members
            ]

        with self.__create_cargo_layout() as temporary_dir_with_cargo_toml:
            cargo = CargoVersionCommand(PEP440VersioningScheme)
            previous = PEP440VersioningScheme.parse_version("0.1.0")
            new_version = PEP440VersioningScheme.parse_version("23.4.1")
            updated = cargo.update_version(new_version)
            self.assertEqual(updated.previous, previous)
            self.assertEqual(updated.new, new_version)
            self.assertEqual(
                updated.changed_files,
                expected_changed_files(temporary_dir_with_cargo_toml),
            )

    def test_update_fail(self):
        """Updating to the already current version changes no files."""
        with self.__create_cargo_layout():
            cargo = CargoVersionCommand(PEP440VersioningScheme)
            previous = PEP440VersioningScheme.parse_version("0.1.0")
            new_version = PEP440VersioningScheme.parse_version("0.1.0")
            updated = cargo.update_version(new_version)
            self.assertEqual(updated.previous, previous)
            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.changed_files, [])


class VerifyCargoVersionCommandTestCase(unittest.TestCase):
    """Tests for CargoVersionCommand.verify_version."""

    def test_verify_failure(self):
        """A version differing from Cargo.toml must raise a VersionError."""
        with temp_file(
            VERSION_EXAMPLE, name="Cargo.toml", change_into=True
        ):
            command = CargoVersionCommand(PEP440VersioningScheme)
            mismatching = PEP440VersioningScheme.parse_version("2.3.4")

            with self.assertRaisesRegex(
                VersionError,
                "Provided version 2.3.4 does not match the "
                "current version 0.1.0.",
            ):
                command.verify_version(mismatching)

    def test_success(self):
        """The version stored in Cargo.toml must verify without error."""
        with temp_file(
            VERSION_EXAMPLE, name="Cargo.toml", change_into=True
        ):
            command = CargoVersionCommand(PEP440VersioningScheme)
            command.verify_version(
                PEP440VersioningScheme.parse_version("0.1.0")
            )
pontos-25.3.2/tests/version/commands/test_cmake.py000066400000000000000000000246621476255566300223030ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable=protected-access

import unittest
from unittest.mock import MagicMock, patch

from pontos.testing import temp_directory, temp_file
from pontos.version import VersionError
from pontos.version.commands._cmake import (
    CMakeVersionCommand,
    CMakeVersionParser,
)
from pontos.version.schemes import PEP440VersioningScheme


class VerifyCMakeVersionCommandTestCase(unittest.TestCase):
    """Tests for CMakeVersionCommand.verify_version."""

    def test_verify_failure(self):
        """A mismatch with the CMakeLists.txt version raises VersionError."""
        content = "project(VERSION 1.2.3)\nset(PROJECT_DEV_VERSION 0)"
        with temp_file(content, name="CMakeLists.txt", change_into=True):
            command = CMakeVersionCommand(PEP440VersioningScheme)
            other = PEP440VersioningScheme.parse_version("2.3.4")

            with self.assertRaisesRegex(
                VersionError,
                "Provided version 2.3.4 does not match the "
                "current version 1.2.3.",
            ):
                command.verify_version(other)

    def test_verify_current(self):
        """Without a CMakeLists.txt, verifying "current"/None raises."""
        with temp_directory(change_into=True):
            command = CMakeVersionCommand(PEP440VersioningScheme)

            # Both ways of requesting the current version must fail alike.
            for requested in ("current", None):
                with self.assertRaisesRegex(
                    VersionError, "^.*CMakeLists.txt not found."
                ):
                    command.verify_version(requested)


class GetCurrentCMakeVersionCommandTestCase(unittest.TestCase):
    """Tests for reading the current version from CMakeLists.txt."""

    # pylint: disable=line-too-long
    @patch(
        "pontos.version.commands._cmake.CMakeVersionCommand.get_current_version",
        MagicMock(return_value=PEP440VersioningScheme.parse_version("21.4")),
    )
    def test_return_0_correct_version_on_verify(self):
        """verify_version succeeds when the mocked current version matches."""
        with temp_file("", name="CMakeLists.txt", change_into=True):
            command = CMakeVersionCommand(PEP440VersioningScheme)
            command.verify_version(
                PEP440VersioningScheme.parse_version("21.4")
            )

    def test_should_call_print_current_version_without_raising_exception(self):
        """get_current_version extracts the version from the project() call."""
        with temp_file(
            "project(VERSION 21)", name="CMakeLists.txt", change_into=True
        ):
            command = CMakeVersionCommand(PEP440VersioningScheme)
            expected = PEP440VersioningScheme.parse_version("21")
            self.assertEqual(command.get_current_version(), expected)


class UpdateCMakeVersionCommandTestCase(unittest.TestCase):
    """Tests for CMakeVersionCommand.update_version."""

    def test_update_version(self):
        """A new release version rewrites the project() VERSION argument."""
        with temp_file(
            "project(VERSION 21)\nset(PROJECT_DEV_VERSION 0)",
            name="CMakeLists.txt",
            change_into=True,
        ) as temp:
            cmake = CMakeVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22")
            previous_version = PEP440VersioningScheme.parse_version("21")
            updated = cmake.update_version(new_version)

            self.assertEqual(
                "project(VERSION 22)\nset(PROJECT_DEV_VERSION 0)",
                temp.read_text(encoding="utf8"),
            )
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.changed_files, [temp.resolve()])

    def test_update_dev_version(self):
        """A dev version pads to X.0.0 and sets PROJECT_DEV_VERSION to 1."""
        with temp_file(
            "project(VERSION 21)\nset(PROJECT_DEV_VERSION 0)",
            name="CMakeLists.txt",
            change_into=True,
        ) as temp:
            cmake = CMakeVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22.dev1")
            previous_version = PEP440VersioningScheme.parse_version("21")
            updated = cmake.update_version(new_version)

            self.assertEqual(
                "project(VERSION 22.0.0)\nset(PROJECT_DEV_VERSION 1)",
                temp.read_text(encoding="utf8"),
            )
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.changed_files, [temp.resolve()])

    def test_no_update(self):
        """Updating to the already-current version changes no files."""
        with temp_file(
            "project(VERSION 22)\nset(PROJECT_DEV_VERSION 0)",
            name="CMakeLists.txt",
            change_into=True,
        ) as temp:
            cmake = CMakeVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22")
            previous_version = PEP440VersioningScheme.parse_version("22")
            updated = cmake.update_version(new_version)

            self.assertEqual(
                "project(VERSION 22)\nset(PROJECT_DEV_VERSION 0)",
                temp.read_text(encoding="utf8"),
            )
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.changed_files, [])

    def test_forced_update(self):
        """force=True reports the file as changed even for an equal version."""
        with temp_file(
            "project(VERSION 22)\nset(PROJECT_DEV_VERSION 0)",
            name="CMakeLists.txt",
            change_into=True,
        ) as temp:
            cmake = CMakeVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22")
            previous_version = PEP440VersioningScheme.parse_version("22")
            updated = cmake.update_version(new_version, force=True)

            self.assertEqual(
                "project(VERSION 22)\nset(PROJECT_DEV_VERSION 0)",
                temp.read_text(encoding="utf8"),
            )
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.changed_files, [temp.resolve()])


class ProjectFileCMakeVersionCommandTestCase(unittest.TestCase):
    """Tests for CMakeVersionCommand.project_found."""

    def test_project_file_not_found(self):
        """An empty directory contains no CMake project."""
        with temp_directory(change_into=True):
            self.assertFalse(
                CMakeVersionCommand(PEP440VersioningScheme).project_found()
            )

    def test_project_file_found(self):
        """A CMakeLists.txt marks the directory as a CMake project."""
        with temp_file(name="CMakeLists.txt", change_into=True):
            self.assertTrue(
                CMakeVersionCommand(PEP440VersioningScheme).project_found()
            )


class CMakeVersionParserTestCase(unittest.TestCase):
    """Tests for the CMakeLists.txt parser CMakeVersionParser."""

    def test_get_current_version_single_line_project(self):
        """VERSION is extracted from a single-line project() call."""
        under_test = CMakeVersionParser("project(VERSION 2.3.4)")
        version = PEP440VersioningScheme.parse_version("2.3.4")
        self.assertEqual(under_test.get_current_version(), version)

    def test_update_version_project(self):
        """update_version rewrites the VERSION argument in place."""
        under_test = CMakeVersionParser("project(VERSION 2.3.4)")
        version = PEP440VersioningScheme.parse_version("2.3.5")
        self.assertEqual(
            under_test.update_version(version),
            "project(VERSION 2.3.5)",
        )

    def test_not_confuse_version_outside_project(self):
        """Only the project() call is consulted, not similarly-shaped calls."""
        under_test = CMakeVersionParser(
            "non_project(VERSION 2.3.5)\nproject(VERSION 2.3.4)"
        )
        version = PEP440VersioningScheme.parse_version("2.3.4")
        self.assertEqual(under_test.get_current_version(), version)

    def test_get_current_version_multiline_project(self):
        """project() calls spread over several lines are parsed as well."""
        under_test = CMakeVersionParser("project\n(\nVERSION\n\t    2.3.4)")
        version = PEP440VersioningScheme.parse_version("2.3.4")
        self.assertEqual(under_test.get_current_version(), version)

    def test_find_project_dev_version(self):
        """The PROJECT_DEV_VERSION set() line and its value are located."""
        test_cmake_lists = """
        project(
            DESCRIPTION something
            VERSION 41.41.41
            LANGUAGES c
        )
        set(
            PROJECT_DEV_VERSION 1
        )
        """
        under_test = CMakeVersionParser(test_cmake_lists)
        # Line numbers are 0-based over the fixture's split lines.
        self.assertEqual(under_test._project_dev_version_line_number, 7)
        self.assertEqual(under_test._project_dev_version, "1")

    def test_update_project_dev_version(self):
        """Updating to a release version resets PROJECT_DEV_VERSION to 0."""
        test_cmake_lists = """
        project(
            DESCRIPTION something
            VERSION 41.41.41
            LANGUAGES c
        )
        set(
            PROJECT_DEV_VERSION 1
        )
        """
        under_test = CMakeVersionParser(test_cmake_lists)
        version = PEP440VersioningScheme.parse_version("41.41.41")

        self.assertEqual(under_test._project_dev_version_line_number, 7)
        self.assertEqual(under_test._project_dev_version, "1")

        result = under_test.update_version(version)

        self.assertEqual(under_test._project_dev_version, "0")
        self.assertEqual(
            result,
            test_cmake_lists.replace(
                "PROJECT_DEV_VERSION 1", "PROJECT_DEV_VERSION 0"
            ),
        )

    def test_update_project_dev_version_when_succeeded_by_another_set(self):
        """PROJECT_DEV_VERSION is updated even when other statements follow."""
        test_cmake_lists = """
        cmake_minimum_required(VERSION 3.1)

        project(hello_world VERSION 41.41.41)
        set(PROJECT_DEV_VERSION 1)

        add_executable(app main.c)
        """
        under_test = CMakeVersionParser(test_cmake_lists)
        version = PEP440VersioningScheme.parse_version("41.41.41")

        self.assertEqual(under_test._project_dev_version_line_number, 4)
        self.assertEqual(under_test._project_dev_version, "1")

        result = under_test.update_version(version)

        self.assertEqual(under_test._project_dev_version, "0")
        self.assertEqual(
            result,
            test_cmake_lists.replace(
                "PROJECT_DEV_VERSION 1", "PROJECT_DEV_VERSION 0"
            ),
        )

    def test_get_current_version_multiline_project_combined_token(self):
        """VERSION is found among other keywords on a combined token line."""
        under_test = CMakeVersionParser(
            "project\n(\nDESCRIPTION something VERSION 2.3.4 LANGUAGES c\n)"
        )
        version = PEP440VersioningScheme.parse_version("2.3.4")
        self.assertEqual(under_test.get_current_version(), version)

    def test_raise_exception_project_no_version(self):
        """A project() call without a VERSION argument raises ValueError."""
        with self.assertRaises(ValueError) as context:
            CMakeVersionParser("project(DESCRIPTION something LANGUAGES c)")
        self.assertEqual(
            str(context.exception), "unable to find cmake version in project."
        )

    def test_raise_exception_no_project(self):
        """Input without any project() call raises ValueError."""
        with self.assertRaises(ValueError) as context:
            CMakeVersionParser(
                "non_project(VERSION 2.3.5)",
            )

        self.assertEqual(
            str(context.exception), "unable to find cmake version."
        )
pontos-25.3.2/tests/version/commands/test_go.py000066400000000000000000000241141476255566300216200ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2021-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from dataclasses import dataclass
from pathlib import Path
from unittest.mock import MagicMock, patch

from pontos.testing import temp_directory, temp_file
from pontos.version import VersionError
from pontos.version.commands._go import GoVersionCommand
from pontos.version.schemes import SemanticVersioningScheme


@dataclass
class StdOutput:
    """Minimal stand-in for a completed-process result exposing stdout."""

    # NOTE(review): appears unused within this module — confirm before
    # removing.
    stdout: bytes


# Name of the version file that GoVersionCommand reads and writes.
VERSION_FILE_PATH = "version.go"
# Skeleton of a generated version.go; placeholders: package name, version.
TEMPLATE = """package {}

// THIS IS AN AUTOGENERATED FILE. DO NOT TOUCH!

var version = "{}"
\n"""


class GetCurrentGoVersionCommandTestCase(unittest.TestCase):
    """Tests for GoVersionCommand.get_current_version."""

    def test_getting_version(self):
        """The version is read from the generated version.go file."""
        with temp_file(
            name="go.mod",
            change_into=True,
        ):
            version = "0.0.1"
            version_file_path = Path(VERSION_FILE_PATH)
            version_file_path.write_text(
                TEMPLATE.format("main", version), encoding="utf-8"
            )
            result_version = GoVersionCommand(
                SemanticVersioningScheme
            ).get_current_version()

            self.assertEqual(
                result_version, SemanticVersioningScheme.parse_version(version)
            )
            version_file_path.unlink()

    def test_no_version_found(self):
        """An empty version.go file raises a VersionError."""
        exp_err_msg = "No version found in the version.go file."
        with (
            temp_file(
                name="go.mod",
                change_into=True,
            ),
            self.assertRaisesRegex(
                VersionError,
                exp_err_msg,
            ),
        ):
            version_file_path = Path(VERSION_FILE_PATH)
            version_file_path.touch()
            GoVersionCommand(SemanticVersioningScheme).get_current_version()

    def test_no_version_file(self):
        """A missing version.go file raises a VersionError."""
        exp_err_msg = (
            "No version.go file found. This file is required for pontos"
        )
        with (
            temp_file(
                name="go.mod",
                change_into=True,
            ),
            self.assertRaisesRegex(
                VersionError,
                exp_err_msg,
            ),
        ):
            GoVersionCommand(SemanticVersioningScheme).get_current_version()

    def test_invalid_version(self):
        """A non-SemVer version string in version.go raises a VersionError."""
        with (
            temp_file(
                name="go.mod",
                change_into=True,
            ),
            self.assertRaisesRegex(
                VersionError, "abc is not valid SemVer string"
            ),
        ):
            version_file_path = Path(VERSION_FILE_PATH)
            version_file_path.write_text(
                TEMPLATE.format("main", "abc"), encoding="utf-8"
            )
            GoVersionCommand(SemanticVersioningScheme).get_current_version()


class VerifyGoVersionCommandTestCase(unittest.TestCase):
    """Tests for GoVersionCommand.verify_version."""

    def test_verify_version(self):
        """A version equal to the (mocked) current version verifies."""
        with (
            temp_file(
                name="go.mod",
                change_into=True,
            ),
            patch.object(
                GoVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version(
                        "21.0.1"
                    )
                ),
            ),
        ):
            cmd = GoVersionCommand(SemanticVersioningScheme)
            cmd.verify_version(SemanticVersioningScheme.parse_version("21.0.1"))

    def test_verify_branch_not_equal(self):
        """A version differing from the current one raises a VersionError."""
        with (
            temp_file(
                name="go.mod",
                change_into=True,
            ),
            patch.object(
                GoVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version(
                        "21.0.1"
                    )
                ),
            ),
            self.assertRaisesRegex(
                VersionError,
                "Provided version 21.2.0 does not match the current version "
                "21.0.1.",
            ),
        ):
            cmd = GoVersionCommand(SemanticVersioningScheme)
            cmd.verify_version(SemanticVersioningScheme.parse_version("21.2.0"))

    def test_verify_current(self):
        """Both "current" and None verify against the mocked version."""
        with (
            temp_file(
                name="go.mod",
                change_into=True,
            ),
            patch.object(
                GoVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version(
                        "21.0.1"
                    )
                ),
            ),
        ):
            cmd = GoVersionCommand(SemanticVersioningScheme)
            cmd.verify_version("current")
            cmd.verify_version(version=None)

    def test_verify_current_failure(self):
        """Verifying "current"/None without a version.go raises."""
        with temp_file(
            name="go.mod",
            change_into=True,
        ):
            cmd = GoVersionCommand(SemanticVersioningScheme)

            with self.assertRaisesRegex(
                VersionError,
                "^No version.go file found. This file is required for pontos",
            ):
                cmd.verify_version("current")

            with self.assertRaisesRegex(
                VersionError,
                "^No version.go file found. This file is required for pontos",
            ):
                cmd.verify_version(version=None)


class UpdateGoVersionCommandTestCase(unittest.TestCase):
    """Tests for GoVersionCommand.update_version."""

    def test_no_file_update_version(self):
        """Without an existing version.go the file is created from scratch."""
        with temp_directory(change_into=True) as temp:
            go_mod = temp / "go.mod"
            go_mod.touch()

            version = SemanticVersioningScheme.parse_version("22.2.2")
            updated_version_obj = GoVersionCommand(
                SemanticVersioningScheme
            ).update_version(version)
            version_file_path = Path(VERSION_FILE_PATH)
            content = version_file_path.read_text(encoding="utf-8")

            self.assertIn(str(version), content)

            # No previous version exists when the file had to be created.
            self.assertIsNone(updated_version_obj.previous)
            self.assertEqual(updated_version_obj.new, version)
            self.assertEqual(
                updated_version_obj.changed_files, [version_file_path]
            )

    def test_update_version(self):
        """An existing version.go is rewritten, keeping its package name."""
        with temp_file(name="go.mod", change_into=True):
            cmd = GoVersionCommand(SemanticVersioningScheme)
            version = SemanticVersioningScheme.parse_version("22.2.2")
            version_file_path = Path(VERSION_FILE_PATH)
            version_file_path.write_text(
                TEMPLATE.format("foo", "0.0.1"), encoding="utf-8"
            )
            updated = cmd.update_version(version)

            content = version_file_path.read_text(encoding="utf-8")
            self.assertIn(str(version), content)
            # The original package name must survive the rewrite.
            self.assertIn("foo", content)
            version_file_path.unlink()

            self.assertEqual(updated.new, version)
            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("0.0.1"),
            )
            self.assertEqual(updated.changed_files, [version_file_path])

    def test_create_file_update_version(self):
        """A version.go is written even when only the mocked version exists."""
        with temp_file(name="go.mod", change_into=True):
            with patch.object(
                GoVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version(
                        "21.0.1"
                    )
                ),
            ):
                version = SemanticVersioningScheme.parse_version("22.2.2")
                cmd = GoVersionCommand(SemanticVersioningScheme)
                updated = cmd.update_version(version)

                version_file_path = Path(VERSION_FILE_PATH)
                content = version_file_path.read_text(encoding="utf-8")

                self.assertIn(str(version), content)
                version_file_path.unlink()

                self.assertEqual(updated.new, version)
                self.assertEqual(
                    updated.previous,
                    SemanticVersioningScheme.parse_version("21.0.1"),
                )
                self.assertEqual(updated.changed_files, [version_file_path])

    def test_no_update(self):
        """Updating to the already-current version changes no files."""
        with temp_file(name="go.mod", change_into=True):
            cmd = GoVersionCommand(SemanticVersioningScheme)
            version = SemanticVersioningScheme.parse_version("22.2.2")
            version_file_path = Path(VERSION_FILE_PATH)
            version_file_path.write_text(
                TEMPLATE.format("main", "22.2.2"), encoding="utf-8"
            )
            updated = cmd.update_version(version)

            content = version_file_path.read_text(encoding="utf-8")
            self.assertIn(str(version), content)

            self.assertEqual(updated.new, version)
            self.assertEqual(updated.previous, version)
            self.assertEqual(updated.changed_files, [])

    def test_forced_update(self):
        """force=True reports the file as changed even for an equal version."""
        with temp_file(name="go.mod", change_into=True):
            cmd = GoVersionCommand(SemanticVersioningScheme)
            version = SemanticVersioningScheme.parse_version("22.2.2")
            version_file_path = Path(VERSION_FILE_PATH)
            version_file_path.write_text(
                TEMPLATE.format("main", "22.2.2"), encoding="utf-8"
            )
            updated = cmd.update_version(version, force=True)

            content = version_file_path.read_text(encoding="utf-8")
            self.assertIn(str(version), content)

            self.assertEqual(updated.new, version)
            self.assertEqual(updated.previous, version)
            self.assertEqual(updated.changed_files, [version_file_path])


class ProjectFileGoVersionCommandTestCase(unittest.TestCase):
    """Tests for GoVersionCommand.project_found."""

    def test_project_file_not_found(self):
        """Without a go.mod file no Go project is detected."""
        with temp_directory(change_into=True):
            self.assertFalse(
                GoVersionCommand(SemanticVersioningScheme).project_found()
            )

    def test_project_file_found(self):
        """A go.mod file marks the directory as a Go project."""
        with temp_file(name="go.mod", change_into=True):
            self.assertTrue(
                GoVersionCommand(SemanticVersioningScheme).project_found()
            )
pontos-25.3.2/tests/version/commands/test_java.py000066400000000000000000000303301476255566300221310ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
import unittest
from pathlib import Path
from string import Template

from pontos.testing import temp_directory, temp_file
from pontos.version import VersionError
from pontos.version.commands import JavaVersionCommand
from pontos.version.schemes import SemanticVersioningScheme

# upgradeVersion.json fixture pointing at a single versioned file.
TEMPLATE_UPGRADE_VERSION_SINGLE_JSON = """{
  "files": [
    {
      "path": "README.md",
      "line": 3
    }
  ]
}
"""

# upgradeVersion.json fixture pointing at two versioned files.
TEMPLATE_UPGRADE_VERSION_MULTI_JSON = """{
  "files": [
    {
      "path": "README.md",
      "line": 3
    },
    {
      "path": "application.properties",
      "line": 2
    }
  ]
}
"""

# upgradeVersion.json fixture with a substitutable line number (${LINE_NO}).
TEMPLATE_UPGRADE_VERSION_WITH_LINE_JSON = Template(
    """{
  "files": [
    {
      "path": "README.md",
      "line": ${LINE_NO}
    }
  ]
}
"""
)

# README fixture; line 3 carries the version placeholder.
TEMPLATE_UPGRADE_VERSION_MARKDOWN = """# Task service

**task service**: Version {}

## starting the local 
"""

# application.properties fixture; line 2 carries the version placeholder.
TEMPLATE_UPGRADE_VERSION_WITH_VERSION_PROPERTIES = """# application
sentry.release={}
server.port=8080
"""


class VerifyJavaVersionParsingTestCase(unittest.TestCase):
    """Tests for version-string recognition in JavaVersionCommand.parse_line."""

    def test_version_parsing(self):
        """Every supported version form is split into pre/version/post."""
        supported_versions = (
            "2023.12.10",
            "2023.1.1",
            "2023.10.1",
            "2023.1.99",
            "0.0.1",
            "1.2.3-a1",
            "1.2.3-alpha1",
            "1.2.3-alpha1-dev1",
            "1.2.3-b1",
            "1.2.3-beta1",
            "1.2.3-beta1-dev1",
            "1.2.3-rc1",
            "1.2.3-rc1-dev1",
            "1.2.3-dev1",
            "22.4.1",
            "22.4.1-dev1",
            "0.5.0.dev1",
            "1.0.0-dev1",
            "1.0.0-alpha1",
            "1.0.0-alpha1-dev1",
            "1.0.0-beta1",
            "1.0.0-beta1-dev1",
            "1.0.0-rc1",
            "1.0.0-rc1-dev1",
        )
        for candidate in supported_versions:
            with self.subTest(version=candidate):
                match = JavaVersionCommand(
                    SemanticVersioningScheme
                ).parse_line(f"pre{candidate}post")

                self.assertEqual("pre", match.group("pre"))
                self.assertEqual(candidate, match.group("version"))
                self.assertEqual("post", match.group("post"))


class GetCurrentJavaVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaVersionCommand.get_current_version."""

    def test_getting_version(self):
        """The version is read from the first file configured in the JSON."""
        with temp_directory(change_into=True):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_SINGLE_JSON,
                encoding="utf-8",
            )

            version = "2023.9.3"
            readme_file_path = Path("README.md")
            readme_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
                encoding="utf-8",
            )

            result_version = JavaVersionCommand(
                SemanticVersioningScheme
            ).get_current_version()

            self.assertEqual(
                result_version, SemanticVersioningScheme.parse_version(version)
            )

            version_file_path.unlink()
            readme_file_path.unlink()

    def test_getting_version_no_files_configured(self):
        """An empty "files" list in the config raises a VersionError."""
        exp_err_msg = "no version found"
        with (
            temp_directory(change_into=True),
            self.assertRaisesRegex(
                VersionError,
                exp_err_msg,
            ),
        ):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                """{"files": []}""",
                encoding="utf-8",
            )

            JavaVersionCommand(SemanticVersioningScheme).get_current_version()

            version_file_path.unlink()

    def test_getting_version_without_version_config(self):
        """A missing upgradeVersion.json raises a VersionError."""
        # NOTE(review): the pattern assumes temp dirs live under /tmp —
        # confirm on non-Linux platforms.
        exp_err_msg = (
            r"No /tmp/.*/upgradeVersion\.json config file found\. "
            r"This file is required for pontos"
        )
        with (
            temp_directory(change_into=True),
            self.assertRaisesRegex(
                VersionError,
                exp_err_msg,
            ),
        ):
            JavaVersionCommand(SemanticVersioningScheme).get_current_version()


class VerifyJavaVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaVersionCommand.verify_version."""

    def test_verify_version(self):
        """All files listed in upgradeVersion.json carry the given version."""
        with temp_directory(change_into=True):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MULTI_JSON, encoding="utf-8"
            )

            version = "2023.9.3"
            readme_file_path = Path("README.md")
            readme_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
                encoding="utf-8",
            )
            properties_file_path = Path("application.properties")
            properties_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_WITH_VERSION_PROPERTIES.format(
                    version
                ),
                encoding="latin-1",
            )

            # Must not raise: every configured file holds the version.
            JavaVersionCommand(SemanticVersioningScheme).verify_version(
                SemanticVersioningScheme.parse_version(version)
            )

            version_file_path.unlink()
            readme_file_path.unlink()
            properties_file_path.unlink()

    def test_verify_version_does_not_match(self):
        """A version differing from the configured files raises VersionError."""
        # NOTE(review): the pattern assumes temp dirs live under /tmp —
        # confirm on non-Linux platforms.
        exp_err_msg = (
            r"Provided version 2023\.9\.4 does not match the "
            + r"current version 2023\.9\.3 "
            + r"in '/tmp/.*/upgradeVersion\.json'"
        )

        with temp_directory(change_into=True):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_SINGLE_JSON, encoding="utf-8"
            )

            version = "2023.9.3"
            new_version = "2023.9.4"
            readme_file_path = Path("README.md")
            readme_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
                encoding="utf-8",
            )

            # Scope the expected exception to the verify call only. In the
            # previous layout the whole body ran inside assertRaisesRegex, so
            # the unlink() cleanup statements after this call were
            # unreachable dead code (temp_directory cleans up anyway) and a
            # failure during fixture setup could be mistaken for the
            # expected error.
            with self.assertRaisesRegex(VersionError, exp_err_msg):
                JavaVersionCommand(SemanticVersioningScheme).verify_version(
                    SemanticVersioningScheme.parse_version(new_version)
                )


class UpdateJavaVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaVersionCommand.update_version."""

    def test_update_version(self):
        """A new version is written into the configured file line."""
        with temp_directory(change_into=True):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_SINGLE_JSON, encoding="utf-8"
            )

            version = "2023.9.3"
            readme_file_path = Path("README.md")
            readme_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
                encoding="utf-8",
            )

            new_version = "2023.9.4"
            updated_version_obj = JavaVersionCommand(
                SemanticVersioningScheme
            ).update_version(
                SemanticVersioningScheme.parse_version(new_version)
            )

            self.assertEqual(
                updated_version_obj.previous,
                SemanticVersioningScheme.parse_version(version),
            )
            self.assertEqual(
                updated_version_obj.new,
                SemanticVersioningScheme.parse_version(new_version),
            )
            self.assertEqual(
                updated_version_obj.changed_files, [Path("README.md")]
            )

            content = readme_file_path.read_text(encoding="UTF-8")
            self.assertEqual(
                content,
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(new_version),
            )

            version_file_path.unlink()
            readme_file_path.unlink()

    def test_no_update_version(self):
        """Updating to the already-current version changes no files."""
        with temp_directory(change_into=True):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_SINGLE_JSON,
                encoding="utf-8",
            )

            version = "2023.9.3"
            readme_file_path = Path("README.md")
            readme_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
                encoding="utf-8",
            )

            updated_version_obj = JavaVersionCommand(
                SemanticVersioningScheme
            ).update_version(SemanticVersioningScheme.parse_version(version))

            self.assertEqual(
                updated_version_obj.previous,
                SemanticVersioningScheme.parse_version(version),
            )
            self.assertEqual(
                updated_version_obj.new,
                SemanticVersioningScheme.parse_version(version),
            )
            self.assertEqual(
                updated_version_obj.changed_files,
                [],
            )

            content = readme_file_path.read_text(encoding="UTF-8")
            self.assertEqual(
                content,
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
            )

            version_file_path.unlink()
            readme_file_path.unlink()

    def test_forced_update_version(self):
        """force=True reports the file as changed even for an equal version."""
        with temp_directory(change_into=True):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_SINGLE_JSON,
                encoding="utf-8",
            )

            version = "2023.9.3"
            readme_file_path = Path("README.md")
            readme_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
                encoding="utf-8",
            )

            updated_version_obj = JavaVersionCommand(
                SemanticVersioningScheme
            ).update_version(
                SemanticVersioningScheme.parse_version(version), force=True
            )

            self.assertEqual(
                updated_version_obj.previous,
                SemanticVersioningScheme.parse_version(version),
            )
            self.assertEqual(
                updated_version_obj.new,
                SemanticVersioningScheme.parse_version(version),
            )
            self.assertEqual(
                updated_version_obj.changed_files,
                [Path("README.md")],
            )

            content = readme_file_path.read_text(encoding="UTF-8")
            self.assertEqual(
                content,
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
            )

            version_file_path.unlink()
            readme_file_path.unlink()

    def test_update_version_upgrade_config_with_wrong_line_number(self):
        """A configured line that holds no version raises a VersionError."""
        exp_err_msg = (
            "Line has no version, "
            "file:'README.md' "
            "lineNo:4 "
            "content:'\n'"
        )
        with temp_directory(change_into=True):
            version_file_path = Path("upgradeVersion.json")
            version_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_WITH_LINE_JSON.substitute(LINE_NO="4"),
                encoding="utf-8",
            )

            version = "2023.9.3"
            readme_file_path = Path("README.md")
            readme_file_path.write_text(
                TEMPLATE_UPGRADE_VERSION_MARKDOWN.format(version),
                encoding="utf-8",
            )

            new_version = "2023.9.4"
            # Scope the expected exception to the update call only. In the
            # previous layout the whole body ran inside assertRaisesRegex,
            # so the unlink() cleanup statements after this call were
            # unreachable dead code (temp_directory cleans up anyway).
            with self.assertRaisesRegex(VersionError, exp_err_msg):
                JavaVersionCommand(SemanticVersioningScheme).update_version(
                    SemanticVersioningScheme.parse_version(new_version)
                )


class ProjectFileJavaVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaVersionCommand.project_found()."""

    def test_project_file_not_found(self):
        """An empty directory is not recognized as a Java project."""
        with temp_directory(change_into=True):
            command = JavaVersionCommand(SemanticVersioningScheme)
            self.assertFalse(command.project_found())

    def test_project_file_found(self):
        """An upgradeVersion.json file marks the directory as a project."""
        with temp_file(name="upgradeVersion.json", change_into=True):
            command = JavaVersionCommand(SemanticVersioningScheme)
            self.assertTrue(command.project_found())
pontos-25.3.2/tests/version/commands/test_javascript.py000066400000000000000000000411141476255566300233600ustar00rootroot00000000000000# Copyright (C) 2022-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#


import json
import unittest
from unittest.mock import MagicMock, patch

from pontos.testing import temp_directory, temp_file
from pontos.version import VersionError
from pontos.version.commands import JavaScriptVersionCommand
from pontos.version.schemes import SemanticVersioningScheme


class GetCurrentJavaScriptVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaScriptVersionCommand.get_current_version()."""

    def test_get_current_version(self):
        """The version is read from the "version" field of package.json."""
        package_content = '{"name": "foo", "version": "1.2.3"}'
        with temp_file(
            package_content, name="package.json", change_into=True
        ):
            command = JavaScriptVersionCommand(SemanticVersioningScheme)
            self.assertEqual(
                command.get_current_version(),
                SemanticVersioningScheme.parse_version("1.2.3"),
            )

    def test_no_project_file(self):
        """A missing package.json raises a VersionError."""
        with (
            temp_directory(change_into=True),
            self.assertRaisesRegex(VersionError, ".* file not found."),
        ):
            JavaScriptVersionCommand(
                SemanticVersioningScheme
            ).get_current_version()

    def test_no_package_version(self):
        """A package.json without a "version" field raises a VersionError."""
        with (
            temp_file(
                '{"name": "foo"}', name="package.json", change_into=True
            ),
            self.assertRaisesRegex(VersionError, "Version field missing in"),
        ):
            JavaScriptVersionCommand(
                SemanticVersioningScheme
            ).get_current_version()

    def test_no_valid_json_in_package_version(self):
        """Malformed JSON in package.json raises a VersionError."""
        with (
            temp_file("{", name="package.json", change_into=True),
            self.assertRaisesRegex(VersionError, "No valid JSON found."),
        ):
            JavaScriptVersionCommand(
                SemanticVersioningScheme
            ).get_current_version()


class UpdateJavaScriptVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaScriptVersionCommand.update_version().

    The command rewrites the "version" field of package.json and, when
    present, the VERSION constants in the extra files listed in
    ``JavaScriptVersionCommand.version_file_paths`` (src/version.js and
    src/version.ts, as asserted below).
    """

    def test_update_version_file(self):
        """Updating bumps package.json and reports it as changed."""
        content = '{"name":"foo", "version":"1.2.3"}'

        with temp_file(content, name="package.json", change_into=True) as temp:
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.get_current_version()
            updated = cmd.update_version(
                SemanticVersioningScheme.parse_version("22.4.0")
            )

            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("1.2.3"),
            )
            self.assertEqual(
                updated.new, SemanticVersioningScheme.parse_version("22.4.0")
            )
            self.assertEqual(updated.changed_files, [temp.resolve()])

            with temp.open(mode="r", encoding="utf-8") as fp:
                fake_package = json.load(fp)

            self.assertEqual(fake_package["version"], "22.4.0")

    def test_update_js_version_file(self):
        """A double-quoted VERSION constant in src/version.js is updated."""
        content = '{"name":"foo", "version":"1.2.3"}'
        js_content = """const foo = "bar";
const VERSION = "1.2.3";
const func = () => ();
"""

        with temp_directory(change_into=True) as temp_dir:
            package_json = temp_dir / "package.json"
            package_json.write_text(content, encoding="utf8")
            js_version_file = (
                temp_dir / JavaScriptVersionCommand.version_file_paths[0]
            )
            js_version_file.parent.mkdir()
            js_version_file.write_text(js_content, encoding="utf8")

            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            updated = cmd.update_version(
                SemanticVersioningScheme.parse_version("22.4.0")
            )

            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("1.2.3"),
            )
            self.assertEqual(
                updated.new, SemanticVersioningScheme.parse_version("22.4.0")
            )
            self.assertEqual(
                updated.changed_files,
                [
                    package_json.resolve(),
                    JavaScriptVersionCommand.version_file_paths[0],
                ],
            )

            with package_json.open(mode="r", encoding="utf-8") as fp:
                fake_package = json.load(fp)

            self.assertEqual(fake_package["version"], "22.4.0")

            # Only the VERSION line changes; surrounding code is untouched.
            self.assertEqual(
                js_version_file.read_text(encoding="utf8"),
                'const foo = "bar";\nconst VERSION = "22.4.0";\n'
                "const func = () => ();\n",
            )

    def test_update_js_version_file_with_single_quotes(self):
        """A single-quoted VERSION constant keeps its quoting style."""
        content = '{"name":"foo", "version":"1.2.3"}'
        js_content = """const foo = "bar";
const VERSION = '1.2.3';
const func = () => ();
"""

        with temp_directory(change_into=True) as temp_dir:
            package_json = temp_dir / "package.json"
            package_json.write_text(content, encoding="utf8")
            js_version_file = (
                temp_dir / JavaScriptVersionCommand.version_file_paths[0]
            )
            js_version_file.parent.mkdir()
            js_version_file.write_text(js_content, encoding="utf8")

            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            updated = cmd.update_version(
                SemanticVersioningScheme.parse_version("22.4.0")
            )

            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("1.2.3"),
            )
            self.assertEqual(
                updated.new, SemanticVersioningScheme.parse_version("22.4.0")
            )
            self.assertEqual(
                updated.changed_files,
                [
                    package_json.resolve(),
                    JavaScriptVersionCommand.version_file_paths[0],
                ],
            )

            with package_json.open(mode="r", encoding="utf-8") as fp:
                fake_package = json.load(fp)

            self.assertEqual(fake_package["version"], "22.4.0")

            # The single quotes around the new version are preserved.
            self.assertEqual(
                js_version_file.read_text(encoding="utf8"),
                "const foo = \"bar\";\nconst VERSION = '22.4.0';\n"
                "const func = () => ();\n",
            )

    def test_update_version_files(self):
        """Both src/version.js and src/version.ts are updated together."""
        content = '{"name":"foo", "version":"1.2.3"}'
        file_content = """const foo = "bar";
const VERSION = "1.2.3";
const func = () => ();
"""

        with temp_directory(change_into=True) as temp_dir:
            package_json = temp_dir / "package.json"
            package_json.write_text(content, encoding="utf8")

            js_version_file = (
                temp_dir / JavaScriptVersionCommand.version_file_paths[0]
            )
            js_version_file.parent.mkdir()
            js_version_file.write_text(file_content, encoding="utf8")

            ts_version_file = (
                temp_dir / JavaScriptVersionCommand.version_file_paths[1]
            )
            ts_version_file.write_text(file_content, encoding="utf8")

            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            updated = cmd.update_version(
                SemanticVersioningScheme.parse_version("22.4.0")
            )

            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("1.2.3"),
            )
            self.assertEqual(
                updated.new, SemanticVersioningScheme.parse_version("22.4.0")
            )
            self.assertEqual(
                updated.changed_files,
                [
                    package_json.resolve(),
                    JavaScriptVersionCommand.version_file_paths[0],
                    JavaScriptVersionCommand.version_file_paths[1],
                ],
            )

            with package_json.open(mode="r", encoding="utf-8") as fp:
                fake_package = json.load(fp)

            self.assertEqual(fake_package["version"], "22.4.0")

            self.assertEqual(
                js_version_file.read_text(encoding="utf8"),
                'const foo = "bar";\nconst VERSION = "22.4.0";\n'
                "const func = () => ();\n",
            )

    def test_update_version_develop(self):
        """Pre-release versions (e.g. 22.4.0-dev1) are written verbatim."""
        content = '{"name":"foo", "version":"1.2.3"}'

        with temp_file(content, name="package.json", change_into=True) as temp:
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            new_version = SemanticVersioningScheme.parse_version("22.4.0-dev1")
            updated = cmd.update_version(new_version)

            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("1.2.3"),
            )
            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.changed_files, [temp.resolve()])

            with temp.open(mode="r", encoding="utf-8") as fp:
                fake_package = json.load(fp)

            self.assertEqual(fake_package["version"], "22.4.0-dev1")

    def test_no_update(self):
        """Setting the already-current version changes no files."""
        content = '{"name":"foo", "version":"1.2.3"}'

        with temp_file(content, name="package.json", change_into=True) as temp:
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            updated = cmd.update_version(
                SemanticVersioningScheme.parse_version("1.2.3")
            )

            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("1.2.3"),
            )
            self.assertEqual(
                updated.new, SemanticVersioningScheme.parse_version("1.2.3")
            )
            self.assertEqual(updated.changed_files, [])

            with temp.open(mode="r", encoding="utf-8") as fp:
                fake_package = json.load(fp)

            self.assertEqual(fake_package["version"], "1.2.3")

    def test_forced_update(self):
        """force=True rewrites package.json even for an unchanged version."""
        content = '{"name":"foo", "version":"1.2.3"}'

        with temp_file(content, name="package.json", change_into=True) as temp:
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            updated = cmd.update_version(
                SemanticVersioningScheme.parse_version("1.2.3"), force=True
            )

            self.assertEqual(
                updated.previous,
                SemanticVersioningScheme.parse_version("1.2.3"),
            )
            self.assertEqual(
                updated.new, SemanticVersioningScheme.parse_version("1.2.3")
            )
            self.assertEqual(updated.changed_files, [temp.resolve()])

            with temp.open(mode="r", encoding="utf-8") as fp:
                fake_package = json.load(fp)

            self.assertEqual(fake_package["version"], "1.2.3")


class VerifyJavaScriptVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaScriptVersionCommand.verify_version().

    Verification compares a provided version against package.json and,
    where applicable, against the versions found in src/version.js and
    src/version.ts. The special value "current" (or None) verifies that
    package.json and the version files agree with each other.
    """

    def test_versions_not_equal(self):
        """A provided version differing from package.json must raise."""
        with (
            patch.object(
                JavaScriptVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version("1.2.3")
                ),
            ),
            self.assertRaisesRegex(
                VersionError,
                "Provided version .* does not match the current version .*",
            ),
        ):
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.verify_version(SemanticVersioningScheme.parse_version("22.4.0"))

    def test_verify_success(self):
        """A provided version matching package.json verifies cleanly."""
        with patch.object(
            JavaScriptVersionCommand,
            "get_current_version",
            MagicMock(
                return_value=SemanticVersioningScheme.parse_version("22.4.0")
            ),
        ):
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.verify_version(SemanticVersioningScheme.parse_version("22.4.0"))

    def test_verify_js_mismatch(self):
        """A mismatch in src/version.js raises a VersionError."""
        with (
            patch.object(
                JavaScriptVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version(
                        "22.4.0"
                    )
                ),
            ),
            # The file-version lookup reports a different version than
            # package.json for the first checked file (src/version.js).
            patch.object(
                JavaScriptVersionCommand,
                "_get_current_file_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version(
                        "22.5.0"
                    )
                ),
            ),
            self.assertRaisesRegex(
                VersionError,
                "Provided version 22.4.0 does not match the current version 22.5.0 "
                "in src/version.js.",
            ),
        ):
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.verify_version(SemanticVersioningScheme.parse_version("22.4.0"))

    def test_verify_ts_mismatch(self):
        """A mismatch in src/version.ts raises a VersionError."""
        with (
            patch.object(
                JavaScriptVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=SemanticVersioningScheme.parse_version(
                        "22.4.0"
                    )
                ),
            ),
            # side_effect: first call (src/version.js) matches, second
            # call (src/version.ts) reports the mismatching version.
            patch.object(
                JavaScriptVersionCommand,
                "_get_current_file_version",
                MagicMock(
                    side_effect=[
                        SemanticVersioningScheme.parse_version("22.4.0"),
                        SemanticVersioningScheme.parse_version("22.5.0"),
                    ]
                ),
            ),
            self.assertRaisesRegex(
                VersionError,
                "Provided version 22.4.0 does not match the current version 22.5.0 "
                "in src/version.ts.",
            ),
        ):
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.verify_version(SemanticVersioningScheme.parse_version("22.4.0"))

    def test_verify_current(self):
        """"current" and None both verify against package.json only."""
        with patch.object(
            JavaScriptVersionCommand,
            "get_current_version",
            MagicMock(
                return_value=SemanticVersioningScheme.parse_version("22.4.0")
            ),
        ):
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.verify_version("current")
            cmd.verify_version(None)

    def test_verify_current_failure(self):
        """Verifying "current" without a package.json raises an error."""
        with temp_directory(change_into=True):
            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)

            with self.assertRaisesRegex(
                VersionError, "^.*package.json file not found"
            ):
                cmd.verify_version("current")

            with self.assertRaisesRegex(
                VersionError, "^.*package.json file not found"
            ):
                cmd.verify_version(None)

    def test_verify_current_js_version_matches(self):
        """Agreeing package.json and src/version.js verify cleanly."""
        content = '{"name":"foo", "version":"1.2.3"}'
        js_content = 'const VERSION = "1.2.3";'

        with temp_directory(change_into=True) as temp_dir:
            package_json = temp_dir / "package.json"
            package_json.write_text(content, encoding="utf8")
            js_version_file = (
                temp_dir / JavaScriptVersionCommand.version_file_paths[0]
            )
            js_version_file.parent.mkdir()
            js_version_file.write_text(js_content, encoding="utf8")

            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.verify_version("current")
            cmd.verify_version(None)

    def test_verify_current_js_mismatch(self):
        """A src/version.js disagreeing with package.json must raise."""
        content = '{"name":"foo", "version":"1.2.3"}'
        js_content = 'const VERSION = "1.2.4";'

        with (
            temp_directory(change_into=True) as temp_dir,
            self.assertRaisesRegex(
                VersionError,
                "The version 1.2.4 in src/version.js doesn't match the current "
                "version 1.2.3.",
            ),
        ):
            package_json = temp_dir / "package.json"
            package_json.write_text(content, encoding="utf8")
            js_version_file = (
                temp_dir / JavaScriptVersionCommand.version_file_paths[0]
            )
            js_version_file.parent.mkdir()
            js_version_file.write_text(js_content, encoding="utf8")

            cmd = JavaScriptVersionCommand(SemanticVersioningScheme)
            cmd.verify_version("current")
            # NOTE(review): verify_version("current") above is expected to
            # raise, so this second call is never reached.
            cmd.verify_version(None)


class ProjectFileJavaScriptVersionCommandTestCase(unittest.TestCase):
    """Tests for JavaScriptVersionCommand.project_found()."""

    def test_project_file_not_found(self):
        """An empty directory is not recognized as a JavaScript project."""
        with temp_directory(change_into=True):
            command = JavaScriptVersionCommand(SemanticVersioningScheme)
            self.assertFalse(command.project_found())

    def test_project_file_found(self):
        """A package.json file marks the directory as a project."""
        with temp_file(name="package.json", change_into=True):
            command = JavaScriptVersionCommand(SemanticVersioningScheme)
            self.assertTrue(command.project_found())
pontos-25.3.2/tests/version/commands/test_python.py000066400000000000000000000405341476255566300225400ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

# pylint: disable = protected-access

import unittest
from pathlib import Path
from unittest.mock import MagicMock, PropertyMock, patch

import tomlkit

from pontos.testing import temp_directory, temp_file, temp_python_module
from pontos.version import VersionError
from pontos.version.commands._python import PythonVersionCommand
from pontos.version.schemes import (
    PEP440VersioningScheme,
    SemanticVersioningScheme,
)


class GetCurrentPythonVersionCommandTestCase(unittest.TestCase):
    """Tests for PythonVersionCommand.get_current_version().

    The command reads its configuration from the [tool.pontos.version]
    section of pyproject.toml and loads the actual version from the
    Python module referenced by the version-module-file key.
    """

    def test_missing_tool_pontos_version_section(self):
        """A pyproject.toml without [tool.pontos.version] must raise."""
        with (
            temp_file("[tool.pontos]", name="pyproject.toml", change_into=True),
            self.assertRaisesRegex(
                VersionError,
                r"^\[tool\.pontos\.version\] section missing in .*\.$",
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            cmd.get_current_version()

    def test_missing_version_module_file_key(self):
        """A config section lacking version-module-file must raise."""
        with (
            temp_file(
                '[tool.pontos.version]\nname="foo"',
                name="pyproject.toml",
                change_into=True,
            ),
            self.assertRaisesRegex(
                VersionError,
                r"^version-module-file key not set in \[tool\.pontos\.version\] "
                r"section .*\.$",
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            cmd.get_current_version()

    def test_version_file_path(self):
        """version_file_path reflects the configured module file path."""
        with temp_file(
            '[tool.pontos.version]\nversion-module-file="foo/__version__.py"',
            name="pyproject.toml",
            change_into=True,
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)

            self.assertEqual(
                cmd.version_file_path, Path("foo") / "__version__.py"
            )

    def test_pyproject_toml_file_not_exists(self):
        """A missing pyproject.toml raises a VersionError."""
        with (
            temp_directory(change_into=True),
            self.assertRaisesRegex(
                VersionError, "pyproject.toml file not found."
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            cmd.get_current_version()

    def test_no_version_module(self):
        """A configured but non-existent version module must raise."""
        with (
            temp_file(
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                name="pyproject.toml",
                change_into=True,
            ),
            self.assertRaisesRegex(
                VersionError,
                r"Could not load version from 'foo'\. .* not found.",
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            cmd.get_current_version()

    def test_get_current_version(self):
        """The version is loaded from the configured version module."""
        with temp_python_module(
            "__version__ = '1.2.3'", name="foo", change_into=True
        ) as tmp_module:
            tmp_file = tmp_module.parent / "pyproject.toml"
            tmp_file.write_text(
                '[tool.poetry]\nversion = "1.2.3"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            version = cmd.get_current_version()

            self.assertEqual(
                version, PEP440VersioningScheme.parse_version("1.2.3")
            )

    def test_get_current_semantic_version(self):
        """Even with the semantic scheme a PEP 440 version is returned.

        NOTE(review): the command is created with SemanticVersioningScheme
        but the assertions compare against a PEP 440 parsed version and
        check isinstance against the PEP 440 version class — presumably
        because Python projects always store PEP 440 versions; confirm
        against the command implementation.
        """
        with temp_python_module(
            "__version__ = '1.2.3a1'", name="foo", change_into=True
        ) as tmp_module:
            tmp_file = tmp_module.parent / "pyproject.toml"
            tmp_file.write_text(
                '[tool.poetry]\nversion = "1.2.3a1"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )
            cmd = PythonVersionCommand(SemanticVersioningScheme)
            version = cmd.get_current_version()

            self.assertEqual(
                version, PEP440VersioningScheme.parse_version("1.2.3a1")
            )
            self.assertIsInstance(version, PEP440VersioningScheme.version_cls)


class UpdatePythonVersionTestCase(unittest.TestCase):
    """Tests for PythonVersionCommand.update_version().

    Updating touches both the configured version module (foo.py in these
    tests) and the [tool.poetry] version field of pyproject.toml.
    """

    def test_update_version_file(self):
        """Both the version module and pyproject.toml are updated."""
        content = "__version__ = '21.1'"
        with temp_python_module(content, name="foo", change_into=True) as temp:
            tmp_file = temp.parent / "pyproject.toml"
            tmp_file.write_text(
                '[tool.poetry]\nversion = "1.2.3"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )

            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22.2")
            previous_version = PEP440VersioningScheme.parse_version("21.1")

            updated = cmd.update_version(new_version)

            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(
                updated.changed_files, [Path("foo.py"), tmp_file.resolve()]
            )

            text = temp.read_text(encoding="utf8")

        # The rewritten module ends with a trailing newline, so the
        # version assignment is the second-to-last split element.
        *_, version_line, _last_line = text.split("\n")

        self.assertEqual(version_line, '__version__ = "22.2"')

    def test_empty_pyproject_toml(self):
        """An empty pyproject.toml must raise a VersionError."""
        with (
            temp_file("", name="pyproject.toml", change_into=True),
            self.assertRaisesRegex(
                VersionError,
                r"\[tool.pontos.version\] section missing in .*pyproject\.toml\.",
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22.1.2")
            cmd.update_version(new_version)

    def test_empty_tool_section(self):
        """A bare [tool] section without pontos config must raise."""
        with (
            temp_file("[tool]", name="pyproject.toml", change_into=True),
            self.assertRaisesRegex(
                VersionError,
                r"\[tool.pontos.version\] section missing in .*pyproject\.toml\.",
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22.1.2")
            cmd.update_version(new_version)

    def test_empty_tool_poetry_section(self):
        """A version field is added to an empty [tool.poetry] section."""
        content = "__version__ = '22.1'"
        with temp_python_module(content, name="foo", change_into=True) as temp:
            tmp_file = temp.parent / "pyproject.toml"
            tmp_file.write_text(
                "[tool.poetry]\n"
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22.2")
            previous_version = PEP440VersioningScheme.parse_version("22.1")
            updated = cmd.update_version(new_version)

            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(
                updated.changed_files, [Path("foo.py"), tmp_file.resolve()]
            )

            text = tmp_file.read_text(encoding="utf8")

            toml = tomlkit.parse(text)

            self.assertEqual(toml["tool"]["poetry"]["version"], "22.2")

    def test_override_existing_version(self):
        """An existing [tool.poetry] version is overwritten."""
        content = "__version__ = '1.2.3'"
        with temp_python_module(content, name="foo", change_into=True) as temp:
            tmp_file = temp.parent / "pyproject.toml"
            tmp_file.write_text(
                '[tool.poetry]\nversion = "1.2.3"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22.2")
            previous_version = PEP440VersioningScheme.parse_version("1.2.3")
            updated = cmd.update_version(new_version)

            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(
                updated.changed_files, [Path("foo.py"), tmp_file.resolve()]
            )

            text = tmp_file.read_text(encoding="utf8")

            toml = tomlkit.parse(text)

            self.assertEqual(toml["tool"]["poetry"]["version"], "22.2")

    def test_development_version(self):
        """Development versions (e.g. 22.2.dev1) are written verbatim."""
        content = "__version__ = '1.2.3'"
        with temp_python_module(content, name="foo", change_into=True) as temp:
            tmp_file = temp.parent / "pyproject.toml"
            tmp_file.write_text(
                '[tool.poetry]\nversion = "1.2.3"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("22.2.dev1")
            previous_version = PEP440VersioningScheme.parse_version("1.2.3")
            updated = cmd.update_version(new_version)

            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.previous, previous_version)
            self.assertEqual(
                updated.changed_files, [Path("foo.py"), tmp_file.resolve()]
            )

            text = tmp_file.read_text(encoding="utf8")

            toml = tomlkit.parse(text)

            self.assertEqual(toml["tool"]["poetry"]["version"], "22.2.dev1")

    def test_no_update(self):
        """Setting the already-current version changes no files."""
        content = "__version__ = '1.2.3'"
        with temp_python_module(content, name="foo", change_into=True) as temp:
            tmp_file = temp.parent / "pyproject.toml"
            tmp_file.write_text(
                '[tool.poetry]\nversion = "1.2.3"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("1.2.3")
            updated = cmd.update_version(new_version)

            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.previous, new_version)
            self.assertEqual(updated.changed_files, [])

    def test_forced_updated(self):
        """force=True rewrites the files even for an unchanged version."""
        content = "__version__ = '1.2.3'"
        with temp_python_module(content, name="foo", change_into=True) as temp:
            tmp_file = temp.parent / "pyproject.toml"
            tmp_file.write_text(
                '[tool.poetry]\nversion = "1.2.3"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            new_version = PEP440VersioningScheme.parse_version("1.2.3")
            updated = cmd.update_version(new_version, force=True)

            self.assertEqual(updated.new, new_version)
            self.assertEqual(updated.previous, new_version)
            self.assertEqual(
                updated.changed_files, [Path("foo.py"), tmp_file.resolve()]
            )

            text = tmp_file.read_text(encoding="utf8")

            toml = tomlkit.parse(text)

            self.assertEqual(toml["tool"]["poetry"]["version"], "1.2.3")


class VerifyVersionTestCase(unittest.TestCase):
    """Tests for PythonVersionCommand.verify_version().

    get_current_version and version_file_path are patched so the tests
    control the module-reported version independently of the version
    stored in the pyproject.toml fixture.
    """

    def test_current_version_not_equal_pyproject_toml_version(self):
        """Verification fails when pyproject.toml disagrees.

        NOTE(review): no pyproject.toml is written here, so the version
        compared against the mocked current version cannot match —
        confirm the exact failure path against the implementation.
        """
        fake_version_py = Path("foo.py")
        with (
            patch.object(
                PythonVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=PEP440VersioningScheme.parse_version("1.2.3")
                ),
            ),
            patch.object(
                PythonVersionCommand,
                "version_file_path",
                new=PropertyMock(return_value=fake_version_py),
            ),
            self.assertRaisesRegex(
                VersionError,
                "The version .* in .* doesn't match the current version .*.",
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            version = PEP440VersioningScheme.parse_version("1.2.3")
            cmd.verify_version(version)

    def test_current_version(self):
        """"current" verifies when pyproject.toml and module agree."""
        fake_version_py = Path("foo.py")
        content = (
            '[tool.poetry]\nversion = "1.2.3"\n'
            '[tool.pontos.version]\nversion-module-file = "foo.py"'
        )

        with (
            temp_file(content, name="pyproject.toml", change_into=True),
            patch.object(
                PythonVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=PEP440VersioningScheme.parse_version("1.2.3")
                ),
            ),
            patch.object(
                PythonVersionCommand,
                "version_file_path",
                new=PropertyMock(return_value=fake_version_py),
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            cmd.verify_version("current")

    def test_current_failure(self):
        """"current" fails when pyproject.toml disagrees with the module."""
        fake_version_py = Path("foo.py")
        # pyproject.toml holds 1.2.4 while the module reports 1.2.3.
        content = (
            '[tool.poetry]\nversion = "1.2.4"\n'
            '[tool.pontos.version]\nversion-module-file = "foo.py"'
        )

        with (
            temp_file(content, name="pyproject.toml", change_into=True),
            patch.object(
                PythonVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=PEP440VersioningScheme.parse_version("1.2.3")
                ),
            ),
            patch.object(
                PythonVersionCommand,
                "version_file_path",
                new=PropertyMock(return_value=fake_version_py),
            ),
            self.assertRaisesRegex(
                VersionError,
                "The version .* in .* doesn't match the current version .*.",
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            cmd.verify_version("current")

    def test_provided_version_mismatch(self):
        """A provided version differing from the current one must raise."""
        fake_version_py = Path("foo.py")
        content = (
            '[tool.poetry]\nversion = "1.2.3"\n'
            '[tool.pontos.version]\nversion-module-file = "foo.py"'
        )

        with (
            temp_file(content, name="pyproject.toml", change_into=True),
            patch.object(
                PythonVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=PEP440VersioningScheme.parse_version("1.2.3")
                ),
            ),
            patch.object(
                PythonVersionCommand,
                "version_file_path",
                new=PropertyMock(return_value=fake_version_py),
            ),
        ):
            with self.assertRaisesRegex(
                VersionError,
                "Provided version .* does not match the current version .*.",
            ):
                cmd = PythonVersionCommand(PEP440VersioningScheme)
                version = PEP440VersioningScheme.parse_version("1.2.4")
                cmd.verify_version(version)

    def test_verify_success(self):
        """A provided version matching the current one verifies cleanly."""
        fake_version_py = Path("foo.py")
        content = (
            '[tool.poetry]\nversion = "1.2.3"\n'
            '[tool.pontos.version]\nversion-module-file = "foo.py"'
        )

        with (
            temp_file(content, name="pyproject.toml", change_into=True),
            patch.object(
                PythonVersionCommand,
                "get_current_version",
                MagicMock(
                    return_value=PEP440VersioningScheme.parse_version("1.2.3")
                ),
            ),
            patch.object(
                PythonVersionCommand,
                "version_file_path",
                new=PropertyMock(return_value=fake_version_py),
            ),
        ):
            cmd = PythonVersionCommand(PEP440VersioningScheme)
            version = PEP440VersioningScheme.parse_version("1.2.3")

            cmd.verify_version(version)


class ProjectFilePythonVersionCommandTestCase(unittest.TestCase):
    """Tests for detecting a pyproject.toml based Python project."""

    def test_project_file_not_found(self):
        # Without a pyproject.toml the project must not be detected.
        with temp_directory(change_into=True):
            command = PythonVersionCommand(PEP440VersioningScheme)
            self.assertFalse(command.project_found())

    def test_project_file_found(self):
        # An existing pyproject.toml marks the directory as a project.
        with temp_file(name="pyproject.toml", change_into=True):
            command = PythonVersionCommand(PEP440VersioningScheme)
            self.assertTrue(command.project_found())
pontos-25.3.2/tests/version/schemes/000077500000000000000000000000001476255566300174265ustar00rootroot00000000000000pontos-25.3.2/tests/version/schemes/__init__.py000066400000000000000000000001351476255566300215360ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
pontos-25.3.2/tests/version/schemes/test_pep440.py000066400000000000000000001061751476255566300220650ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from datetime import datetime

from pontos.version._errors import VersionError
from pontos.version.schemes._pep440 import PEP440Version as Version
from pontos.version.schemes._pep440 import (
    PEP440VersionCalculator as VersionCalculator,
)
from pontos.version.schemes._semantic import SemanticVersion


class PEP440VersionTestCase(unittest.TestCase):
    """Tests for parsing, comparing and inspecting PEP 440 versions.

    Fixes applied: removed a dead loop over an empty list in
    test_equal_with_semantic_version, corrected the test datum
    "1.0.01.dev1" to "1.0.0a1.dev1" in test_local, and fixed
    "then"/"than" typos in assertion failure messages.
    """

    def test_parse_version(self):
        """Valid PEP 440 version strings must parse successfully."""
        versions = [
            "0.0.1",
            "1.2.3",
            "1.2.3.post1",
            "1.2.3a1",
            "1.2.3b1",
            "1.2.3rc1",
            "1.2.3a1+dev1",
            "1.2.3a1.dev1",
            "22.4.1",
            "22.4.1.dev1",
            "22.4.1.dev3",
            "2022.4.1.dev3",
            "2022.4.1",
        ]
        for version in versions:
            self.assertEqual(Version.from_string(version), Version(version))

    def test_parse_version_from_semver(self):
        """Semver-style version strings are accepted by the PEP 440 parser."""
        versions = [
            "0.0.1",
            "1.2.3",
            "1.2.3-post1.dev1",
            "1.2.3-a1",
            "1.2.3-b1",
            "1.2.3-rc1",
            "1.2.3-a1+dev1",
            "1.2.3-a1-dev1",
            "1.4.1",
            "2.4.1-dev1",
            "2.4.1-dev3",
        ]
        for version in versions:
            self.assertEqual(Version.from_string(version), Version(version))

    def test_parsed_version(self):
        """parsed_version keeps the original input representation."""
        versions = [
            "0.0.1",
            "1.2.3",
            "1.2.3-post1.dev1",
            "1.2.3-a1",
            "1.2.3-b1",
            "1.2.3-rc1",
            "1.2.3-a1+dev1",
            "1.2.3-a1-dev1",
            "1.4.1",
            "2.4.1-dev1",
            "2.4.1-dev3",
            "1.2.3.post1",
            "1.2.3a1",
            "1.2.3b1",
            "1.2.3rc1",
            "1.2.3a1+dev1",
            "1.2.3a1.dev1",
            "22.4.1.dev1",
            "22.4.1.dev3",
            "2022.4.1.dev3",
        ]
        for version in versions:
            self.assertEqual(
                Version.from_string(version).parsed_version, version
            )

        # Converting from another scheme keeps the original representation
        # while str() yields the normalized PEP 440 form.
        semver_version = SemanticVersion.from_string("22.4.1-dev1")
        pep440_version = Version.from_version(semver_version)

        self.assertEqual(str(pep440_version), "22.4.1.dev1")
        self.assertEqual(pep440_version.parsed_version, "22.4.1-dev1")

    def test_parse_error(self):
        """Invalid version strings must raise a VersionError."""
        versions = [
            "abc",
            "1.2.3d",
        ]

        for version in versions:
            with self.assertRaisesRegex(
                VersionError, "^Invalid version: '.*'$"
            ):
                Version.from_string(version)

    def test_equal(self):
        """__eq__ for equal, unequal and non-comparable values."""
        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0.dev1", "1.0.0.dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0a1", "1.0.0-alpha1"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev1"),
            ("1.0.0a1.dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0b1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0rc1", "1.0.0-rc1"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) == Version.from_string(version2),
                f"{version1} does not equal {version2}",
            )

        versions = [
            ("1.0.0", "1.0.1"),
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0", "1.0.0.dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0.dev1", "1.0.0.dev2"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) == Version.from_string(version2),
                f"{version1} equals {version2}",
            )

        # Comparing against unparsable strings or None is simply unequal.
        versions = [
            ("1.0.0", "abc"),
            ("1.0.0", None),
        ]
        for version1, version2 in versions:
            self.assertFalse(Version.from_string(version1) == version2)

        # Comparing against unrelated types must raise.
        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) == version2)

    def test_equal_with_semantic_version(self):
        """PEP 440 versions compare equal to equivalent semver versions."""
        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0.dev1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-a1", "1.0.0-alpha1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0a1.dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-b1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0a1", "1.0.0-alpha1"),
            ("1.0.0b1", "1.0.0-beta1"),
            ("1.0.0rc1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0a1+dev1", "1.0.0-alpha1+dev1"),
            ("1.0.0b1+dev1", "1.0.0-beta1+dev1"),
            ("1.0.0rc1+dev1", "1.0.0-rc1+dev1"),
        ]
        for version1, version2 in versions:
            pep440 = Version.from_string(version1)
            semver = SemanticVersion.from_string(version2)
            self.assertTrue(
                pep440 == semver,
                f"{pep440!r} {version1} does not equal {semver!r} {version2}",
            )
        # NOTE: a former second loop over an empty list was dead code and
        # has been removed; TODO(review): add actual non-equal cross-scheme
        # pairs if that coverage is wanted.

    def test_not_equal(self):
        """__ne__ mirrors __eq__ for all comparison categories."""
        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0.dev1", "1.0.0.dev1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) != Version.from_string(version2),
                f"{version1} does not equal {version2}",
            )

        versions = [
            ("1.0.0", "1.0.1"),
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0", "1.0.0.dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0.dev1", "1.0.0.dev2"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1", "1.0.0-beta1.dev1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) != Version.from_string(version2),
                f"{version1} equals {version2}",
            )

        # Unparsable strings and None are simply not equal.
        versions = [
            ("1.0.0", "abc"),
            ("1.0.0", None),
        ]
        for version1, version2 in versions:
            self.assertTrue(Version.from_string(version1) != version2)

        # Unrelated types must raise.
        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) != version2)

    def test_greater_then(self):
        """__gt__ ordering across release, pre-release, dev and local parts."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0.dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1.dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) > Version.from_string(version2),
                f"{version1} should be greater than {version2}",
            )

        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0.dev1", "1.0.0.dev1"),
            ("1.0.0.dev1", "1.0.0.dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) > Version.from_string(version2),
                f"{version1} should not be greater than {version2}",
            )

        # Unrelated types must raise.
        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) > version2)

    def test_greater_or_equal_then(self):
        """__ge__ ordering across release, pre-release, dev and local parts."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0"),
            ("1.0.0", "1.0.0.dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0.dev1", "1.0.0.dev1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0-beta1.dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) >= Version.from_string(version2),
                f"{version1} should be greater or equal than {version2}",
            )

        versions = [
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0.dev1", "1.0.0.dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) >= Version.from_string(version2),
                f"{version1} should not be greater or equal than {version2}",
            )

        # Unrelated types must raise.
        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) >= version2)

    def test_less_then(self):
        """__lt__ — the greater-than tables with swapped operands."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0.dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1.dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
        ]
        for version2, version1 in versions:
            self.assertTrue(
                Version.from_string(version1) < Version.from_string(version2),
                f"{version1} should be less than {version2}",
            )

        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0.dev1", "1.0.0.dev1"),
            ("1.0.0.dev1", "1.0.0.dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version2, version1 in versions:
            self.assertFalse(
                Version.from_string(version1) < Version.from_string(version2),
                f"{version1} should not be less than {version2}",
            )

        # Unrelated types must raise.
        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) < version2)

    def test_less_or_equal_then(self):
        """__le__ — the greater-or-equal tables with swapped operands."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0"),
            ("1.0.0", "1.0.0.dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0.dev1", "1.0.0.dev1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1.dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0-beta1.dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
        ]
        for version2, version1 in versions:
            self.assertTrue(
                Version.from_string(version1) <= Version.from_string(version2),
                f"{version1} should be less or equal than {version2}",
            )

        versions = [
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0.dev1", "1.0.0.dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1.dev1", "1.0.0-alpha1.dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1.dev1", "1.0.0-beta1.dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1.dev1", "1.0.0-rc1.dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version2, version1 in versions:
            self.assertFalse(
                Version.from_string(version1) <= Version.from_string(version2),
                f"{version1} should not be less or equal than {version2}",
            )

        # Unrelated types must raise.
        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) <= version2)

    def test_is_dev_release(self):
        """is_dev_release is True only for .dev segments, not +dev locals."""
        versions = [
            "1.0.0.dev1",
            "1.0.0dev1",
            "1.0.0-alpha1.dev1",
            "1.0.0-beta1.dev1",
            "1.0.0-rc1.dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_dev_release,
                f"{version} is not a dev release",
            )

        versions = [
            "1.0.0",
            "1.0.0+dev1",
            "1.0.0a1",
            "1.0.0b1",
            "1.0.0rc1",
            "1.0.0-alpha1",
            "1.0.0-beta1",
            "1.0.0-rc1",
            "1.0.0a1+dev1",
            "1.0.0b1+dev1",
            "1.0.0rc1+dev1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_dev_release,
                f"{version} is a dev release",
            )

    def test_is_alpha_release(self):
        """is_alpha_release detects alpha pre-releases in every spelling."""
        versions = [
            "1.0.0-alpha1",
            "1.0.0-a1",
            "1.0.0a1",
            "1.0.0a1+foo",
            "1.0.0a1.dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_alpha_release,
                f"{version} is not an alpha release",
            )

        versions = [
            "1.0.0",
            "1.0.0.dev1",
            "1.0.0b1",
            "1.0.0rc1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_alpha_release,
                f"{version} is an alpha release",
            )

    def test_is_beta_release(self):
        """is_beta_release detects beta pre-releases in every spelling."""
        versions = [
            "1.0.0-beta1",
            "1.0.0-b1",
            "1.0.0b1",
            "1.0.0b1+foo",
            "1.0.0b1.dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_beta_release,
                f"{version} is not a beta release",
            )

        versions = [
            "1.0.0",
            "1.0.0.dev1",
            "1.0.0a1",
            "1.0.0rc1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_beta_release,
                f"{version} is a beta release",
            )

    def test_is_release_candidate(self):
        """is_release_candidate detects rc pre-releases in every spelling."""
        versions = [
            "1.0.0-rc1",
            "1.0.0rc1",
            "1.0.0rc1+foo",
            "1.0.0rc1.dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_release_candidate,
                f"{version} is not a release candidate",
            )

        versions = [
            "1.0.0",
            "1.0.0.dev1",
            "1.0.0a1",
            "1.0.0b1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_release_candidate,
                f"{version} is a release candidate",
            )

    def test_pre(self):
        """pre yields a (phase, number) tuple or None for non-pre-releases."""
        versions = [
            ("1.0.0", None),
            ("1.0.0.dev1", None),
            ("1.0.0-dev1", None),
            ("1.0.0a1", ("alpha", 1)),
            ("1.0.0-alpha1", ("alpha", 1)),
            ("1.0.0b1", ("beta", 1)),
            ("1.0.0-beta1", ("beta", 1)),
            ("1.0.0rc1", ("rc", 1)),
            ("1.0.0-rc1", ("rc", 1)),
            ("1.0.0rc1+foo1", ("rc", 1)),
            ("1.0.0a1.dev1", ("alpha", 1)),
            ("1.0.0a1.dev1", ("alpha", 1)),
            ("1.0.0b1.dev1", ("beta", 1)),
            ("1.0.0rc1.dev1", ("rc", 1)),
        ]

        for version, expected in versions:
            self.assertEqual(Version.from_string(version).pre, expected)

    def test_local(self):
        """local yields the +label segment as a tuple or None without one."""
        versions = [
            ("1.0.0", None),
            ("1.0.0+dev1", ("dev", 1)),
            ("1.0.0-dev1", None),
            ("1.0.0.dev1", None),
            ("1.0.0a1", None),
            ("1.0.0-alpha1", None),
            ("1.0.0b1", None),
            ("1.0.0-beta1", None),
            ("1.0.0rc1", None),
            ("1.0.0-rc1", None),
            ("1.0.0rc1+foo1", ("foo", 1)),
            ("1.0.0rc1+dev1", ("dev", 1)),
            # was "1.0.01.dev1" — corrected to fit the a1/b1/rc1 pattern
            ("1.0.0a1.dev1", None),
            ("1.0.0b1.dev1", None),
            ("1.0.0rc1.dev1", None),
        ]

        for version, expected in versions:
            version_local = Version.from_string(version).local
            self.assertEqual(
                version_local,
                expected,
                f"{version_local} does not match {expected} for version "
                f"{version}",
            )


class PEP440VersionCalculatorTestCase(unittest.TestCase):
    def test_next_patch_version(self):
        """next_patch_version finalizes pre/dev releases or bumps patch."""
        calc = VersionCalculator()

        cases = [
            ("0.0.1", "0.0.2"),
            ("1.2.3", "1.2.4"),
            ("1.2.3.post1", "1.2.4"),
            ("1.2.3a1", "1.2.3"),
            ("1.2.3b1", "1.2.3"),
            ("1.2.3rc1", "1.2.3"),
            ("1.2.3a1.dev1", "1.2.3"),
            ("1.2.3b1.dev1", "1.2.3"),
            ("1.2.3rc1.dev1", "1.2.3"),
            ("22.4.1", "22.4.2"),
            ("22.4.1.dev1", "22.4.1"),
            ("22.4.1.dev3", "22.4.1"),
            ("2022.4.1.dev3", "2022.4.1"),
        ]

        for current, expected in cases:
            result = calc.next_patch_version(Version.from_string(current))

            self.assertEqual(
                result,
                Version.from_string(expected),
                f"{result} is not the expected next patch version "
                f"{expected} for {current}",
            )

    def test_next_calendar_versions(self):
        """next_calendar_version derives the release from today's date."""
        calc = VersionCalculator()
        now = datetime.today()
        yy = now.year % 100  # two-digit calendar year

        # (current version, expected next calendar version)
        cases = [
            ("21.4.1.dev3", f"{yy}.{now.month}.0"),
            (f"19.{now.month}.1.dev3", f"{yy}.{now.month}.0"),
            (f"{yy}.{now.month}.1.dev3", f"{yy}.{now.month}.1"),
            (f"{yy}.{now.month}.1", f"{yy}.{now.month}.2"),
            ("2022.4.1", f"{now.year}.{now.month}.0"),
            ("2023.5.1", f"{now.year}.{now.month}.0"),
            (f"{now.year}.{now.month}.1.dev2", f"{now.year}.{now.month}.1"),
            (f"{now.year}.{now.month}.1", f"{now.year}.{now.month}.2"),
        ]

        for current, expected in cases:
            result = calc.next_calendar_version(Version.from_string(current))
            self.assertEqual(result, Version.from_string(expected))

    def test_next_calendar_version_error(self):
        """Versions dated after today must be rejected."""
        calc = VersionCalculator()
        now = datetime.today()
        yy = now.year % 100

        # Versions in the future relative to today's date.
        future_versions = [
            f"{yy + 1}.1.0",
            f"{yy}.{now.month + 1}.0",
            f"{now.year + 1}.{now.month + 1}.0",
        ]

        for future in future_versions:
            with self.assertRaisesRegex(
                VersionError, "'.+' is higher than '.+'."
            ):
                calc.next_calendar_version(Version.from_string(future))

    def test_next_minor_version(self):
        """next_minor_version finalizes pre/dev releases or bumps minor."""
        calc = VersionCalculator()

        cases = [
            ("0.0.1", "0.1.0"),
            ("1.2.3", "1.3.0"),
            ("1.2.3.post1", "1.3.0"),
            ("1.2.3a1", "1.3.0"),
            ("1.2.3b1", "1.3.0"),
            ("1.2.3rc1", "1.3.0"),
            ("1.2.3a1.dev1", "1.3.0"),
            ("1.2.3b1.dev1", "1.3.0"),
            ("1.2.3rc1.dev1", "1.3.0"),
            ("22.4.1", "22.5.0"),
            ("22.4.1.dev1", "22.5.0"),
            ("22.4.1.dev3", "22.5.0"),
            ("1.0.0a1", "1.0.0"),
            ("1.1.0a1", "1.1.0"),
            ("1.0.0.dev1", "1.0.0"),
            ("1.1.0.dev1", "1.1.0"),
            ("2022.1.0.dev1", "2022.1.0"),
        ]

        for current, expected in cases:
            result = calc.next_minor_version(Version.from_string(current))
            self.assertEqual(
                result,
                Version.from_string(expected),
                f"{result} is not the expected next minor version "
                f"{expected} for {current}",
            )

    def test_next_major_version(self):
        """next_major_version finalizes pre/dev releases or bumps major."""
        calc = VersionCalculator()

        cases = [
            ("0.0.1", "1.0.0"),
            ("1.2.3", "2.0.0"),
            ("1.2.3.post1", "2.0.0"),
            ("1.2.3a1", "2.0.0"),
            ("1.2.3b1", "2.0.0"),
            ("1.2.3rc1", "2.0.0"),
            ("1.2.3a1.dev1", "2.0.0"),
            ("1.2.3b1.dev1", "2.0.0"),
            ("1.2.3rc1.dev1", "2.0.0"),
            ("22.4.1", "23.0.0"),
            ("22.4.1.dev1", "23.0.0"),
            ("22.4.1.dev3", "23.0.0"),
            ("1.0.0a1", "1.0.0"),
            ("1.1.0a1", "2.0.0"),
            ("1.0.0.dev1", "1.0.0"),
            ("1.1.0.dev1", "2.0.0"),
        ]
        for current, expected in cases:
            result = calc.next_major_version(Version.from_string(current))
            self.assertEqual(
                result,
                Version.from_string(expected),
                f"{result} is not the expected next major version "
                f"{expected} for {current}",
            )

    def test_next_alpha_version(self):
        """next_alpha_version yields the first (or next) alpha pre-release
        of the following patch version."""
        calc = VersionCalculator()

        test_data = (
            ("0.0.1", "0.0.2a1"),
            ("1.2.3", "1.2.4a1"),
            ("1.2.3.post1", "1.2.4a1"),
            ("1.2.3a1", "1.2.3a2"),
            ("1.2.3b1", "1.2.4a1"),
            ("1.2.3rc1", "1.2.4a1"),
            ("1.2.3a1.dev1", "1.2.3a1"),
            ("1.2.3b1.dev1", "1.2.4a1"),
            ("1.2.3rc1.dev1", "1.2.4a1"),
            ("22.4.1", "22.4.2a1"),
            ("22.4.1.dev1", "22.4.1a1"),
            ("22.4.1.dev3", "22.4.1a1"),
            ("1.0.0a1", "1.0.0a2"),
            ("1.1.0a1", "1.1.0a2"),
            ("1.0.0.dev1", "1.0.0a1"),
            ("1.1.0.dev1", "1.1.0a1"),
        )
        for current, expected in test_data:
            actual = calc.next_alpha_version(Version.from_string(current))
            self.assertEqual(
                actual,
                Version.from_string(expected),
                f"{actual} is not the expected next alpha version "
                f"{expected} for {current}",
            )

    def test_next_beta_version(self):
        """next_beta_version yields the first (or next) beta pre-release;
        alpha and dev releases advance to a beta of the same version."""
        calc = VersionCalculator()

        test_data = (
            ("0.0.1", "0.0.2b1"),
            ("1.2.3", "1.2.4b1"),
            ("1.2.3.post1", "1.2.4b1"),
            ("1.2.3a1", "1.2.3b1"),
            ("1.2.3b1", "1.2.3b2"),
            ("1.2.3rc1", "1.2.4b1"),
            ("1.2.3a1.dev1", "1.2.3b1"),
            ("1.2.3b1.dev1", "1.2.3b1"),
            ("1.2.3rc1.dev1", "1.2.4b1"),
            ("22.4.1", "22.4.2b1"),
            ("22.4.1.dev1", "22.4.1b1"),
            ("22.4.1.dev3", "22.4.1b1"),
            ("1.0.0a1", "1.0.0b1"),
            ("1.1.0a1", "1.1.0b1"),
            ("1.0.0.dev1", "1.0.0b1"),
            ("1.1.0.dev1", "1.1.0b1"),
        )
        for current, expected in test_data:
            actual = calc.next_beta_version(Version.from_string(current))
            self.assertEqual(
                actual,
                Version.from_string(expected),
                f"{actual} is not the expected next beta version "
                f"{expected} for {current}",
            )

    def test_next_release_candidate_version(self):
        """next_release_candidate_version yields the first (or next) rc
        pre-release; alpha/beta/dev releases advance to an rc of the same
        version."""
        calc = VersionCalculator()

        test_data = (
            ("0.0.1", "0.0.2rc1"),
            ("1.2.3", "1.2.4rc1"),
            ("1.2.3.post1", "1.2.4rc1"),
            ("1.2.3a1", "1.2.3rc1"),
            ("1.2.3b1", "1.2.3rc1"),
            ("1.2.3rc1", "1.2.3rc2"),
            ("1.2.3a1.dev1", "1.2.3rc1"),
            ("1.2.3b1.dev1", "1.2.3rc1"),
            ("1.2.3rc1.dev1", "1.2.3rc1"),
            ("22.4.1", "22.4.2rc1"),
            ("22.4.1.dev1", "22.4.1rc1"),
            ("22.4.1.dev3", "22.4.1rc1"),
            ("1.0.0a1", "1.0.0rc1"),
            ("1.1.0a1", "1.1.0rc1"),
            ("1.0.0.dev1", "1.0.0rc1"),
            ("1.1.0.dev1", "1.1.0rc1"),
        )
        for current, expected in test_data:
            actual = calc.next_release_candidate_version(
                Version.from_string(current)
            )
            self.assertEqual(
                actual,
                Version.from_string(expected),
                f"{actual} is not the expected next rc version "
                f"{expected} for {current}",
            )

    def test_next_dev_version(self):
        """next_dev_version increments an existing dev counter, otherwise
        starts a dev release for the next patch/pre-release version."""
        calc = VersionCalculator()

        test_data = (
            ("0.0.1", "0.0.2.dev1"),
            ("1.2.3", "1.2.4.dev1"),
            ("1.2.3.dev1", "1.2.3.dev2"),
            ("1.2.3.post1", "1.2.4.dev1"),
            ("1.2.3a1", "1.2.3a2.dev1"),
            ("1.2.3b1", "1.2.3b2.dev1"),
            ("1.2.3rc1", "1.2.3rc2.dev1"),
            ("1.2.3a1.dev1", "1.2.3a1.dev2"),
            ("1.2.3b1.dev1", "1.2.3b1.dev2"),
            ("1.2.3rc1.dev1", "1.2.3rc1.dev2"),
            ("22.4.1", "22.4.2.dev1"),
            ("22.4.1.dev1", "22.4.1.dev2"),
            ("22.4.1.dev3", "22.4.1.dev4"),
            ("1.0.0a1", "1.0.0a2.dev1"),
            ("1.1.0a1", "1.1.0a2.dev1"),
            ("1.0.0.dev1", "1.0.0.dev2"),
            ("1.1.0.dev1", "1.1.0.dev2"),
        )
        for current, expected in test_data:
            actual = calc.next_dev_version(Version.from_string(current))
            self.assertEqual(
                actual,
                Version.from_string(expected),
                f"{actual} is not the next expected dev version "
                f"{expected} for {current}",
            )
pontos-25.3.2/tests/version/schemes/test_semantic.py000066400000000000000000001113331476255566300226440ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone Networks GmbH
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from datetime import datetime

from pontos.version._errors import VersionError
from pontos.version.schemes._pep440 import PEP440Version
from pontos.version.schemes._semantic import SemanticVersion as Version
from pontos.version.schemes._semantic import (
    SemanticVersionCalculator as VersionCalculator,
)


class SemanticVersionTestCase(unittest.TestCase):
    """Tests for parsing, comparing and inspecting ``SemanticVersion``.

    Fix: the failure messages in ``test_less_or_equal_then`` previously said
    "greater or equal" although the assertions test ``<=``; they now report
    the comparison actually performed.
    """

    def test_parse_version(self):
        """Valid SemVer strings parse via from_string and the constructor."""
        versions = [
            "0.0.1",
            "1.2.3",
            "1.2.3-foo1",
            "1.2.3-a1",
            "1.2.3-alpha1",
            "1.2.3-alpha1-dev1",
            "1.2.3-b1",
            "1.2.3-beta1",
            "1.2.3-beta1-dev1",
            "1.2.3-rc1",
            "1.2.3-rc1-dev1",
            "1.2.3-dev1",
            "1.2.3+foo1",
            "22.4.1",
            "22.4.1-dev1",
            "22.4.1-dev3",
        ]
        for version in versions:
            self.assertEqual(Version.from_string(version), Version(version))

    def test_parsed_version(self):
        """parsed_version preserves the exact input string, including for
        versions converted from a PEP 440 version."""
        versions = [
            "0.0.1",
            "1.2.3",
            "1.2.3-foo1",
            "1.2.3-a1",
            "1.2.3-alpha1",
            "1.2.3-alpha1-dev1",
            "1.2.3-b1",
            "1.2.3-beta1",
            "1.2.3-beta1-dev1",
            "1.2.3-rc1",
            "1.2.3-rc1-dev1",
            "1.2.3-dev1",
            "1.2.3+foo1",
            "22.4.1",
            "22.4.1-dev1",
            "22.4.1-dev3",
        ]
        for version in versions:
            self.assertEqual(
                Version.from_string(version).parsed_version, version
            )

        # a version converted from PEP440 keeps the PEP440 string as
        # parsed_version while rendering as SemVer
        pep440_version = PEP440Version.from_string("22.4.1.dev1")
        semver_version = Version.from_version(pep440_version)

        self.assertEqual(str(semver_version), "22.4.1-dev1")
        self.assertEqual(semver_version.parsed_version, "22.4.1.dev1")

    def test_parse_error(self):
        """Non-SemVer strings (including PEP 440 forms) raise VersionError."""
        versions = [
            "abc",
            "1.2.3d",
            "1.2.3.post1",
            "1.2.3a1",
            "1.2.3b1",
            "1.2.3rc1",
            "1.2.3a1+dev1",
            "22.4.1.dev1",
            "22.4.1.dev3",
        ]

        for version in versions:
            with self.assertRaisesRegex(
                VersionError, "^.* is not valid SemVer string$"
            ):
                Version.from_string(version)

    def test_parse_prerelease_error(self):
        """Unsupported prerelease identifiers raise VersionError."""
        versions = [
            "1.2.3-pos1t1",
            "1.2.3-dev1-dev1",
        ]

        for version in versions:
            with self.assertRaisesRegex(
                VersionError, f"^Invalid prerelease .* in {version}"
            ):
                Version.from_string(version)

    def test_equal(self):
        """== holds for identical versions, fails for differing ones and
        raises ValueError for incomparable types (None compares unequal)."""
        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-dev1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev1"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) == Version.from_string(version2),
                f"{version1} does not equal {version2}",
            )

        versions = [
            ("1.0.0", "1.0.1"),
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0+dev1", "1.0.0-dev1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) == Version.from_string(version2),
                f"{version1} equals {version2}",
            )

        # comparing against None is allowed and never equal
        other = None
        self.assertFalse(Version.from_string("1.0.0") == other)

        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) == version2)

    def test_equal_pep440_version(self):
        """Semantically identical SemVer and PEP 440 versions compare equal."""
        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-dev1"),
            ("1.0.0-dev1", "1.0.0.dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1", "1.0.0a1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0a1.dev1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0b1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev1"),
            ("1.0.0-beta1-dev1", "1.0.0b1.dev1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1.dev1"),
            ("1.0.0-rc1-dev1", "1.0.0rc1.dev1"),
        ]
        for version1, version2 in versions:
            semver = Version.from_string(version1)
            pep440 = PEP440Version.from_string(version2)
            self.assertTrue(
                semver == pep440,
                f"{semver!r} {version1} does not equal {pep440!r} {version2}",
            )

    def test_not_equal(self):
        """!= is the negation of ==; unparsable/None compare unequal and
        incomparable types raise ValueError."""
        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) != Version.from_string(version2),
                f"{version1} does not equal {version2}",
            )

        versions = [
            ("1.0.0", "1.0.1"),
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) != Version.from_string(version2),
                f"{version1} equals {version2}",
            )

        versions = [
            ("1.0.0", "abc"),
            ("1.0.0", None),
        ]
        for version1, version2 in versions:
            self.assertTrue(Version.from_string(version1) != version2)

        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) != version2)

    def test_greater_then(self):
        """> orders releases above pre-releases and dev releases, and raises
        ValueError for incomparable types."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0-dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1-dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1-dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) > Version.from_string(version2),
                f"{version1} should be greater then {version2}",
            )

        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0-dev1", "1.0.0-dev1"),
            ("1.0.0-dev1", "1.0.0-dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) > Version.from_string(version2),
                f"{version1} should not be greater then {version2}",
            )

        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) > version2)

    def test_greater_or_equal_then(self):
        """>= holds for greater or identical versions and raises ValueError
        for incomparable types."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.0", "1.0.0"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0-dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0-dev1", "1.0.0-dev1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0-beta1-dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1", "1.0.0-rc1-dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev1"),
        ]
        for version1, version2 in versions:
            self.assertTrue(
                Version.from_string(version1) >= Version.from_string(version2),
                f"{version1} should be greater or equal then {version2}",
            )

        versions = [
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0-dev1", "1.0.0-dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev2"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev2"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version1, version2 in versions:
            self.assertFalse(
                Version.from_string(version1) >= Version.from_string(version2),
                f"{version1} should not be greater or equal then {version2}",
            )

        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) >= version2)

    def test_less_then(self):
        """< is the mirror of >; note the tuples are unpacked reversed."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0-dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1-dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1-dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
            # the following ones are strange with current semver implementation
            # because they are both less then and greater then at the same time
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version2, version1 in versions:
            self.assertTrue(
                Version.from_string(version1) < Version.from_string(version2),
                f"{version1} should be less then {version2}",
            )

        versions = [
            ("1.0.0", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-dev1"),
            ("1.0.0-dev1", "1.0.0-dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev2"),
        ]
        for version2, version1 in versions:
            self.assertFalse(
                Version.from_string(version1) < Version.from_string(version2),
                f"{version1} should not be less then {version2}",
            )

        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) < version2)

    def test_less_or_equal_then(self):
        """<= is the mirror of >=; note the tuples are unpacked reversed."""
        versions = [
            ("1.0.0", "0.9.9999"),
            ("1.0.0", "1.0.0"),
            ("1.0.1", "1.0.0"),
            ("1.0.0", "1.0.0-dev1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-alpha1"),
            ("1.0.0", "1.0.0-beta1"),
            ("1.0.0", "1.0.0-rc1"),
            ("1.0.0-dev1", "1.0.0-dev1"),
            ("1.0.0+dev1", "1.0.0+dev1"),
            ("1.0.0-alpha1", "1.0.0-dev1"),
            ("1.0.0-alpha1", "1.0.0-alpha1"),
            ("1.0.0-alpha1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev1"),
            ("1.0.0-alpha2", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-dev1"),
            ("1.0.0-beta1", "1.0.0-alpha1"),
            ("1.0.0-beta1", "1.0.0-beta1"),
            ("1.0.0-beta1", "1.0.0-beta1-dev1"),
            ("1.0.0-beta2", "1.0.0-beta1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev1"),
            ("1.0.0-rc1", "1.0.0-dev1"),
            ("1.0.0-rc1", "1.0.0-alpha1"),
            ("1.0.0-rc1", "1.0.0-beta1"),
            ("1.0.0-rc1", "1.0.0-rc1"),
            ("1.0.0-rc1", "1.0.0-rc1-dev1"),
            ("1.0.0-rc2", "1.0.0-rc1"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev1"),
            # the strange ones
            ("1.0.0", "1.0.0+dev1"),
            ("1.0.0+dev1", "1.0.0+dev2"),
            ("1.0.0-alpha1", "1.0.0-alpha1+dev1"),
            ("1.0.0-alpha1+dev1", "1.0.0-alpha1+dev2"),
            ("1.0.0-beta1", "1.0.0-beta1+dev1"),
            ("1.0.0-beta1+dev1", "1.0.0-beta1+dev2"),
            ("1.0.0-rc1", "1.0.0-rc1+dev1"),
            ("1.0.0-rc1+dev1", "1.0.0-rc1+dev2"),
        ]
        for version2, version1 in versions:
            self.assertTrue(
                Version.from_string(version1) <= Version.from_string(version2),
                # fixed message: this assertion tests "less or equal"
                f"{version1} should be less or equal then {version2}",
            )

        versions = [
            ("1.0.0-dev1", "1.0.0-dev2"),
            ("1.0.0", "1.0.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.0.0-alpha1", "1.0.0-beta1"),
            ("1.0.0-alpha1", "1.0.0-rc1"),
            ("1.0.0-alpha1-dev1", "1.0.0-alpha1-dev2"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.0.0-beta1-dev1", "1.0.0-beta1-dev2"),
            ("1.0.0-rc1", "1.0.0"),
            ("1.0.0-rc1-dev1", "1.0.0-rc1-dev2"),
        ]
        for version2, version1 in versions:
            self.assertFalse(
                Version.from_string(version1) <= Version.from_string(version2),
                # fixed message: this assertion tests "less or equal"
                f"{version1} should not be less or equal then {version2}",
            )

        versions = [
            ("1.0.0", object()),
            ("1.0.0", 1),
            ("1.0.0", True),
        ]
        for version1, version2 in versions:
            with self.assertRaisesRegex(ValueError, "Can't compare"):
                self.assertFalse(Version.from_string(version1) <= version2)

    def test_is_dev_release(self):
        """is_dev_release is True only for "-devN" prereleases, not "+devN"
        build metadata."""
        versions = [
            "1.0.0-dev1",
            "1.0.0-alpha1-dev1",
            "1.0.0-beta1-dev1",
            "1.0.0-rc1-dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_dev_release,
                f"{version} is not a dev release",
            )

        versions = [
            "1.0.0",
            "1.0.0+foo1",
            "1.0.0+dev1",
            "1.0.0-alpha1",
            "1.0.0-beta1",
            "1.0.0-rc1",
            "1.0.0-alpha1+dev1",
            "1.0.0-beta1+dev1",
            "1.0.0-rc1+dev1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_dev_release,
                f"{version} is a dev release",
            )

    def test_is_alpha_release(self):
        """is_alpha_release is True for "-alphaN" prereleases only (the
        short "-aN" form does not count)."""
        versions = [
            "1.0.0-alpha1",
            "1.0.0-alpha1+foo1",
            "1.0.0-alpha1-foo1",
            "1.0.0-alpha1+dev1",
            "1.0.0-alpha1-dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_alpha_release,
                f"{version} is not an alpha release",
            )

        versions = [
            "1.0.0",
            "1.0.0+dev1",
            "1.0.0-dev1",
            "1.0.0-a1",
            "1.0.0-b1",
            "1.0.0-rc1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_alpha_release,
                f"{version} is an alpha release",
            )

    def test_is_beta_release(self):
        """is_beta_release is True for "-betaN" prereleases only (the short
        "-bN" form does not count)."""
        versions = [
            "1.0.0-beta1",
            "1.0.0-beta1+foo1",
            "1.0.0-beta1-foo1",
            "1.0.0-beta1+dev1",
            "1.0.0-beta1-dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_beta_release,
                f"{version} is not a beta release",
            )

        versions = [
            "1.0.0",
            "1.0.0-dev1",
            "1.0.0+dev1",
            "1.0.0-alpha1",
            "1.0.0-b1",
            "1.0.0-rc1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_beta_release,
                f"{version} is a beta release",
            )

    def test_is_release_candidate(self):
        """is_release_candidate is True for "-rcN" prereleases."""
        versions = [
            "1.0.0-rc1",
            "1.0.0-rc1+foo1",
            "1.0.0-rc1-foo1",
            "1.0.0-rc1+dev1",
            "1.0.0-rc1-dev1",
        ]
        for version in versions:
            self.assertTrue(
                Version.from_string(version).is_release_candidate,
                f"{version} is not a release candidate",
            )

        versions = [
            "1.0.0",
            "1.0.0+dev1",
            "1.0.0-dev1",
            "1.0.0-alpha1",
            "1.0.0-beta1",
        ]
        for version in versions:
            self.assertFalse(
                Version.from_string(version).is_release_candidate,
                f"{version} is a release candidate",
            )

    def test_pre(self):
        """pre returns the (name, counter) tuple of a real prerelease,
        ignoring dev releases and build metadata."""
        versions = [
            ("1.0.0", None),
            ("1.0.0+dev1", None),
            ("1.0.0-dev1", None),
            ("1.0.0-alpha1", ("alpha", 1)),
            ("1.0.0-beta1", ("beta", 1)),
            ("1.0.0-rc1", ("rc", 1)),
            ("1.0.0-rc1+foo1", ("rc", 1)),
            ("1.0.0-alpha1+dev1", ("alpha", 1)),
            ("1.0.0-beta1+dev1", ("beta", 1)),
            ("1.0.0-rc1+dev1", ("rc", 1)),
            ("1.0.0-alpha1-dev1", ("alpha", 1)),
            ("1.0.0-beta1-dev1", ("beta", 1)),
            ("1.0.0-rc1-dev1", ("rc", 1)),
        ]

        for version, expected in versions:
            self.assertEqual(Version.from_string(version).pre, expected)

    def test_local(self):
        """local returns the (name, counter) tuple of the "+..." build
        metadata; prerelease parts never count as local."""
        versions = [
            ("1.0.0", None),
            ("1.0.0+foo1", ("foo", 1)),
            ("1.0.0+dev1", ("dev", 1)),
            ("1.0.0-dev1", None),
            ("1.0.0-alpha1", None),
            ("1.0.0-beta1", None),
            ("1.0.0-rc1", None),
            ("1.0.0-rc1+foo1", ("foo", 1)),
            ("1.0.0-alpha1+dev1", ("dev", 1)),
            ("1.0.0-beta1+dev1", ("dev", 1)),
            ("1.0.0-rc1+dev1", ("dev", 1)),
            ("1.0.0-alpha1-dev1", None),
            ("1.0.0-beta1-dev1", None),
            ("1.0.0-rc1-dev1", None),
        ]

        for version, expected in versions:
            local = Version.from_string(version).local
            self.assertEqual(
                local,
                expected,
                f"{version} has not the expected local {expected}. Instead it "
                f"is {local}",
            )


class SemanticVersionCalculatorTestCase(unittest.TestCase):
    """Tests for the semantic-versioning ``VersionCalculator``.

    Each test iterates over ``(current_version, expected_next_version)``
    pairs and checks that the calculator derives the expected next
    version from the current one.
    """

    def test_next_patch_version(self):
        """Patch bump: pre-releases finalize, otherwise patch + 1."""
        calculator = VersionCalculator()

        versions = [
            ("0.0.1", "0.0.2"),
            ("1.2.3", "1.2.4"),
            ("1.2.3+dev1", "1.2.4"),
            ("1.2.3-dev1", "1.2.3"),
            ("1.2.3-foo1", "1.2.3"),
            ("1.2.3-alpha1", "1.2.3"),
            ("1.2.3-beta1", "1.2.3"),
            ("1.2.3-rc1", "1.2.3"),
            ("1.2.3-alpha1+dev1", "1.2.3"),
            ("1.2.3-beta1+dev1", "1.2.3"),
            ("1.2.3-rc1+dev1", "1.2.3"),
            ("1.2.3-alpha1-dev1", "1.2.3"),
            ("1.2.3-beta1-dev1", "1.2.3"),
            ("1.2.3-rc1-dev1", "1.2.3"),
            ("22.4.1", "22.4.2"),
            ("22.4.1+dev3", "22.4.2"),
            ("22.4.1-dev1", "22.4.1"),
            ("22.4.1-dev3", "22.4.1"),
            ("1.0.0-a1", "1.0.0"),
            ("1.1.0-alpha1", "1.1.0"),
            ("1.0.0+dev1", "1.0.1"),
            ("1.1.0+dev1", "1.1.1"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.1.0-dev1", "1.1.0"),
        ]

        for current_version, assert_version in versions:
            release_version = calculator.next_patch_version(
                Version.from_string(current_version)
            )

            self.assertEqual(
                release_version,
                Version.from_string(assert_version),
                f"{release_version} is not the expected next patch version "
                f"{assert_version} for {current_version}",
            )

    def test_next_calendar_versions(self):
        """Calendar versions jump to <YY>.<month> or bump the patch part."""
        calculator = VersionCalculator()
        today = datetime.today()
        year_short = today.year % 100

        current_versions = [
            "21.4.1-dev3",
            f"19.{today.month}.1-dev3",
            f"{year_short}.{today.month}.1-dev3",
            f"{year_short}.{today.month}.1",
        ]
        assert_versions = [
            f"{year_short}.{today.month}.0",
            f"{year_short}.{today.month}.0",
            f"{year_short}.{today.month}.1",
            f"{year_short}.{today.month}.2",
        ]

        for current_version, assert_version in zip(
            current_versions, assert_versions
        ):
            release_version = calculator.next_calendar_version(
                Version.from_string(current_version)
            )
            self.assertEqual(
                release_version, Version.from_string(assert_version)
            )

    def test_next_calendar_version_error(self):
        """A current version in the future must raise a VersionError."""
        calculator = VersionCalculator()
        today = datetime.today()
        year_short = today.year % 100

        # next year is always ahead of the calendar version for today
        with self.assertRaisesRegex(VersionError, "'.+' is higher than '.+'."):
            calculator.next_calendar_version(
                Version.from_string(f"{year_short + 1}.1.0")
            )

        # same year but a future month is also ahead
        with self.assertRaisesRegex(VersionError, "'.+' is higher than '.+'."):
            calculator.next_calendar_version(
                Version.from_string(f"{year_short}.{today.month + 1}.0")
            )

    def test_next_minor_version(self):
        """Minor bump: minor + 1 and patch reset, pre-releases finalize."""
        calculator = VersionCalculator()

        versions = [
            ("0.0.1", "0.1.0"),
            ("1.2.3", "1.3.0"),
            ("1.2.3+dev1", "1.3.0"),
            ("1.2.3-dev1", "1.3.0"),
            ("1.2.3-foo1", "1.3.0"),
            ("1.2.3-alpha1", "1.3.0"),
            ("1.2.3-beta1", "1.3.0"),
            ("1.2.3-rc1", "1.3.0"),
            ("1.2.3-alpha1+dev1", "1.3.0"),
            ("1.2.3-beta1+dev1", "1.3.0"),
            ("1.2.3-rc1+dev1", "1.3.0"),
            ("22.4.1", "22.5.0"),
            ("22.4.1+dev3", "22.5.0"),
            ("22.4.1-dev1", "22.5.0"),
            ("22.4.1-dev3", "22.5.0"),
            ("1.0.0-a1", "1.0.0"),
            ("1.1.0-alpha1", "1.1.0"),
            ("1.0.0+dev1", "1.1.0"),
            ("1.1.0+dev1", "1.2.0"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.1.0-dev1", "1.1.0"),
        ]
        for current_version, assert_version in versions:
            release_version = calculator.next_minor_version(
                Version.from_string(current_version)
            )
            self.assertEqual(
                release_version,
                Version.from_string(assert_version),
                f"{release_version} is not the expected next minor version "
                f"{assert_version} for {current_version}",
            )

    def test_next_major_version(self):
        """Major bump: major + 1, minor/patch reset, pre-releases finalize."""
        calculator = VersionCalculator()

        versions = [
            ("0.0.1", "1.0.0"),
            ("1.2.3", "2.0.0"),
            ("1.2.3+dev1", "2.0.0"),
            ("1.2.3-dev1", "2.0.0"),
            ("1.2.3-foo1", "2.0.0"),
            ("1.2.3-alpha1", "2.0.0"),
            ("1.2.3-beta1", "2.0.0"),
            ("1.2.3-rc1", "2.0.0"),
            ("1.2.3-alpha1+dev1", "2.0.0"),
            ("1.2.3-beta1+dev1", "2.0.0"),
            ("1.2.3-rc1+dev1", "2.0.0"),
            ("1.2.3-alpha1-dev1", "2.0.0"),
            ("1.2.3-beta1-dev1", "2.0.0"),
            ("1.2.3-rc1-dev1", "2.0.0"),
            ("22.4.1", "23.0.0"),
            ("22.4.1+dev3", "23.0.0"),
            ("22.4.1-dev1", "23.0.0"),
            ("22.4.1-dev3", "23.0.0"),
            ("1.0.0-a1", "1.0.0"),
            ("1.0.0-beta1", "1.0.0"),
            ("1.1.0-alpha1", "2.0.0"),
            ("1.0.0+dev1", "2.0.0"),
            ("1.1.0+dev1", "2.0.0"),
            ("1.0.0-dev1", "1.0.0"),
            ("1.1.0-dev1", "2.0.0"),
        ]

        for current_version, assert_version in versions:
            release_version = calculator.next_major_version(
                Version.from_string(current_version)
            )
            self.assertEqual(
                release_version,
                Version.from_string(assert_version),
                f"{release_version} is not the expected next major version "
                f"{assert_version} for {current_version}",
            )

    def test_next_alpha_version(self):
        """Alpha bump: alphaN+1 for alphas, otherwise next patch alpha1."""
        calculator = VersionCalculator()

        versions = [
            ("0.0.1", "0.0.2-alpha1"),
            ("1.2.3", "1.2.4-alpha1"),
            ("1.2.3+dev1", "1.2.4-alpha1"),
            ("1.2.3-dev1", "1.2.3-alpha1"),
            ("1.2.3-post1", "1.2.4-alpha1"),
            ("1.2.3-alpha1", "1.2.3-alpha2"),
            ("1.2.3-beta1", "1.2.4-alpha1"),
            ("1.2.3-rc1", "1.2.4-alpha1"),
            ("1.2.3-alpha1+dev1", "1.2.3-alpha2"),
            ("1.2.3-beta1+dev1", "1.2.4-alpha1"),
            ("1.2.3-rc1+dev1", "1.2.4-alpha1"),
            ("1.2.3-alpha1-dev1", "1.2.3-alpha1"),
            ("1.2.3-beta1-dev1", "1.2.4-alpha1"),
            # was an accidental duplicate of the beta1-dev1 case above;
            # cover the rc1-dev1 case like the sibling beta/rc/dev tests do
            ("1.2.3-rc1-dev1", "1.2.4-alpha1"),
            ("22.4.1", "22.4.2-alpha1"),
            ("22.4.1+dev3", "22.4.2-alpha1"),
            ("22.4.1-dev1", "22.4.1-alpha1"),
            ("22.4.1-dev3", "22.4.1-alpha1"),
            ("1.0.0-a1", "1.0.1-alpha1"),
            ("1.0.0-beta1", "1.0.1-alpha1"),
            ("1.1.0-alpha1", "1.1.0-alpha2"),
            ("1.0.0+dev1", "1.0.1-alpha1"),
            ("1.1.0+dev1", "1.1.1-alpha1"),
            ("1.0.0-dev1", "1.0.0-alpha1"),
            ("1.1.0-dev1", "1.1.0-alpha1"),
        ]

        for current_version, assert_version in versions:
            release_version = calculator.next_alpha_version(
                Version.from_string(current_version)
            )
            self.assertEqual(
                release_version,
                Version.from_string(assert_version),
                f"{release_version} is not the expected next alpha version "
                f"{assert_version} for {current_version}",
            )

    def test_next_beta_version(self):
        """Beta bump: betaN+1 for betas, alpha -> beta1, else next patch."""
        calculator = VersionCalculator()

        versions = [
            ("0.0.1", "0.0.2-beta1"),
            ("1.2.3", "1.2.4-beta1"),
            ("1.2.3+dev1", "1.2.4-beta1"),
            ("1.2.3-dev1", "1.2.3-beta1"),
            ("1.2.3-foo1", "1.2.4-beta1"),
            ("1.2.3-alpha1", "1.2.3-beta1"),
            ("1.2.3-beta1", "1.2.3-beta2"),
            ("1.2.3-rc1", "1.2.4-beta1"),
            ("1.2.3-alpha1+dev1", "1.2.3-beta1"),
            ("1.2.3-beta1+dev1", "1.2.3-beta2"),
            ("1.2.3-rc1+dev1", "1.2.4-beta1"),
            ("1.2.3-alpha1-dev1", "1.2.3-beta1"),
            ("1.2.3-beta1-dev1", "1.2.3-beta1"),
            ("1.2.3-rc1-dev1", "1.2.4-beta1"),
            ("22.4.1", "22.4.2-beta1"),
            ("22.4.1+dev3", "22.4.2-beta1"),
            ("22.4.1-dev1", "22.4.1-beta1"),
            ("22.4.1-dev3", "22.4.1-beta1"),
            # actually 1.0.0-beta1 would also be ok, but it would require
            # to add extra code for not used versioning
            ("1.0.0-a1", "1.0.1-beta1"),
            ("1.0.0-beta1", "1.0.0-beta2"),
            ("1.1.0-alpha1", "1.1.0-beta1"),
            ("1.0.0+dev1", "1.0.1-beta1"),
            ("1.1.0+dev1", "1.1.1-beta1"),
            ("1.0.0-dev1", "1.0.0-beta1"),
            ("1.1.0-dev1", "1.1.0-beta1"),
        ]

        for current_version, assert_version in versions:
            release_version = calculator.next_beta_version(
                Version.from_string(current_version)
            )
            self.assertEqual(
                release_version,
                Version.from_string(assert_version),
                f"{release_version} is not the expected next beta version "
                f"{assert_version} for {current_version}",
            )

    def test_next_release_candidate_version(self):
        """RC bump: rcN+1 for rcs, alpha/beta -> rc1, else next patch rc1."""
        calculator = VersionCalculator()

        versions = [
            ("0.0.1", "0.0.2-rc1"),
            ("1.2.3", "1.2.4-rc1"),
            ("1.2.3+dev1", "1.2.4-rc1"),
            ("1.2.3-dev1", "1.2.3-rc1"),
            ("1.2.3-foo1", "1.2.4-rc1"),
            ("1.2.3-alpha1", "1.2.3-rc1"),
            ("1.2.3-beta1", "1.2.3-rc1"),
            ("1.2.3-rc1", "1.2.3-rc2"),
            ("1.2.3-alpha1+dev1", "1.2.3-rc1"),
            ("1.2.3-beta1+dev1", "1.2.3-rc1"),
            ("1.2.3-rc1+dev1", "1.2.3-rc2"),
            ("1.2.3-alpha1-dev1", "1.2.3-rc1"),
            ("1.2.3-beta1-dev1", "1.2.3-rc1"),
            ("1.2.3-rc1-dev1", "1.2.3-rc1"),
            ("22.4.1", "22.4.2-rc1"),
            ("22.4.1+dev3", "22.4.2-rc1"),
            ("22.4.1-dev1", "22.4.1-rc1"),
            ("22.4.1-dev3", "22.4.1-rc1"),
            ("1.0.0-a1", "1.0.1-rc1"),
            ("1.0.0-beta1", "1.0.0-rc1"),
            ("1.1.0-alpha1", "1.1.0-rc1"),
            ("1.0.0+dev1", "1.0.1-rc1"),
            ("1.1.0+dev1", "1.1.1-rc1"),
            ("1.0.0-dev1", "1.0.0-rc1"),
            ("1.1.0-dev1", "1.1.0-rc1"),
        ]

        for current_version, assert_version in versions:
            release_version = calculator.next_release_candidate_version(
                Version.from_string(current_version)
            )
            self.assertEqual(
                release_version,
                Version.from_string(assert_version),
                f"{release_version} is not the expected next rc version "
                f"{assert_version} for {current_version}",
            )

    def test_next_dev_version(self):
        """Dev bump: devN+1 for devs, otherwise a dev1 of the next release."""
        calculator = VersionCalculator()

        versions = [
            ("0.0.1", "0.0.2-dev1"),
            ("1.2.3", "1.2.4-dev1"),
            ("1.2.3+dev1", "1.2.4-dev1"),
            ("1.2.3-dev1", "1.2.3-dev2"),
            ("1.2.3-foo1", "1.2.3-foo2-dev1"),
            ("1.2.3-alpha1", "1.2.3-alpha2-dev1"),
            ("1.2.3-beta1", "1.2.3-beta2-dev1"),
            ("1.2.3-rc1", "1.2.3-rc2-dev1"),
            ("1.2.3-alpha1+dev1", "1.2.3-alpha2-dev1"),
            ("1.2.3-beta1+dev1", "1.2.3-beta2-dev1"),
            ("1.2.3-rc1+dev1", "1.2.3-rc2-dev1"),
            ("1.2.3-alpha1-dev1", "1.2.3-alpha1-dev2"),
            ("1.2.3-beta1-dev1", "1.2.3-beta1-dev2"),
            ("1.2.3-rc1-dev1", "1.2.3-rc1-dev2"),
            ("22.4.1", "22.4.2-dev1"),
            ("22.4.1+dev3", "22.4.2-dev1"),
            ("22.4.1-dev1", "22.4.1-dev2"),
            ("22.4.1-dev3", "22.4.1-dev4"),
            ("1.0.0-a1", "1.0.0-a2-dev1"),
            ("1.0.0-beta1", "1.0.0-beta2-dev1"),
            ("1.1.0-alpha1", "1.1.0-alpha2-dev1"),
            ("1.0.0+dev1", "1.0.1-dev1"),
            ("1.1.0+dev1", "1.1.1-dev1"),
            ("1.0.0-dev1", "1.0.0-dev2"),
            ("1.1.0-dev1", "1.1.0-dev2"),
        ]

        for current_version, assert_version in versions:
            release_version = calculator.next_dev_version(
                Version.from_string(current_version)
            )
            self.assertEqual(
                release_version,
                Version.from_string(assert_version),
                f"{release_version} is not the expected next development "
                f"version {assert_version} for {current_version}",
            )
pontos-25.3.2/tests/version/test_commands.py000066400000000000000000000004351476255566300212130ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest

from pontos.version.commands import get_commands


class GetCommandsTestCase(unittest.TestCase):
    """Tests for the version command discovery helper."""

    def test_available_commands(self):
        """Exactly six version commands are expected to be registered."""
        commands = get_commands()
        self.assertEqual(len(commands), 6)
pontos-25.3.2/tests/version/test_errors.py000066400000000000000000000007071476255566300207300ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#


import unittest

from pontos.version import VersionError


class VersionErrorTestCase(unittest.TestCase):
    """Tests for the VersionError exception type."""

    def test_should_print_message(self):
        """The constructor argument becomes the string representation."""
        error = VersionError("foo bar")
        self.assertEqual(f"{error}", "foo bar")

    def test_should_raise(self):
        """A raised VersionError can be matched by its full message."""
        with self.assertRaisesRegex(VersionError, "^foo bar$"):
            raise VersionError("foo bar")
pontos-25.3.2/tests/version/test_helper.py000066400000000000000000000157301476255566300206750ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2020-2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from unittest.mock import patch

from pontos.git import Git
from pontos.version.helper import (
    get_last_release_version,
    get_last_release_versions,
)
from pontos.version.schemes import (
    PEP440VersioningScheme,
    SemanticVersioningScheme,
)

parse_version = PEP440VersioningScheme.parse_version
Version = PEP440VersioningScheme.version_cls


class GetLastReleaseVersionsTestCase(unittest.TestCase):
    """Tests for ``get_last_release_versions``.

    ``Git`` is patched where the helper module imports it, so no real
    repository is needed; the list returned by ``list_tags`` drives each
    scenario. The helper returns a generator that yields release
    versions newest first.
    """

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_versions(self, git_mock):
        """Tags are parsed and yielded in reverse (newest-first) order."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = ["1", "2", "3.55"]

        it = get_last_release_versions(parse_version)

        version = next(it)
        self.assertEqual(version, Version("3.55"))

        version = next(it)
        self.assertEqual(version, Version("2"))

        version = next(it)
        self.assertEqual(version, Version("1"))

        # the generator must be exhausted after all tags were yielded
        with self.assertRaises(StopIteration):
            next(it)

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_no_release_versions(self, git_mock):
        """No tags at all results in an immediately exhausted generator."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = []

        it = get_last_release_versions(parse_version)
        with self.assertRaises(StopIteration):
            next(it)

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_versions_with_git_prefix(self, git_mock):
        """A git tag prefix (e.g. 'v') is stripped before parsing."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = ["v1", "v2", "v3.55"]

        it = get_last_release_versions(parse_version, git_tag_prefix="v")

        version = next(it)
        self.assertEqual(version, Version("3.55"))

        version = next(it)
        self.assertEqual(version, Version("2"))

        version = next(it)
        self.assertEqual(version, Version("1"))

        with self.assertRaises(StopIteration):
            next(it)

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_versions_ignore_pre_releases(self, git_mock):
        """With ignore_pre_releases, alpha/beta/rc/dev tags are skipped."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = [
            "1",
            "2",
            "3.55a1",
            "3.56.dev1",
            "4.0.0rc1",
            "4.0.1b1",
        ]
        it = get_last_release_versions(parse_version, ignore_pre_releases=True)

        version = next(it)
        self.assertEqual(version, Version("2"))

        version = next(it)
        self.assertEqual(version, Version("1"))

        with self.assertRaises(StopIteration):
            next(it)

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_versions_no_non_pre_release(self, git_mock):
        """Only pre-release tags plus ignore_pre_releases yields nothing."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = [
            "3.55a1",
            "3.56.dev1",
            "4.0.0rc1",
            "4.0.1b1",
        ]

        it = get_last_release_versions(parse_version, ignore_pre_releases=True)

        with self.assertRaises(StopIteration):
            next(it)

    @patch("pontos.version.helper.Git", spec=Git)
    def test_invalid_version(self, git_mock):
        """Tags that don't parse under the scheme are silently skipped.

        "3.55a1" is valid PEP 440 but not valid SemVer, so it must not
        be yielded when the semantic versioning parser is used.
        """
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = [
            "1.0.0",
            "3.55a1",
            "2.0.0",
        ]

        it = get_last_release_versions(SemanticVersioningScheme.parse_version)

        version = next(it)
        self.assertEqual(
            version, SemanticVersioningScheme.parse_version("2.0.0")
        )

        version = next(it)
        self.assertEqual(
            version, SemanticVersioningScheme.parse_version("1.0.0")
        )

        with self.assertRaises(StopIteration):
            next(it)


class GetLastReleaseVersionTestCase(unittest.TestCase):
    """Tests for ``get_last_release_version``.

    Same mocking approach as ``GetLastReleaseVersionsTestCase``: ``Git``
    is patched at the helper module level and ``list_tags`` supplies the
    tags. The helper returns only the single latest release version, or
    ``None`` when no suitable tag exists.
    """

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_version(self, git_mock):
        """The newest tag wins."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = ["1", "2", "3.55"]
        self.assertEqual(
            get_last_release_version(parse_version), Version("3.55")
        )

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_no_release_version(self, git_mock):
        """No tags at all returns None instead of raising."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = []
        self.assertIsNone(get_last_release_version(parse_version))

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_version_with_git_prefix(self, git_mock):
        """A git tag prefix (e.g. 'v') is stripped before parsing."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = ["v1", "v2", "v3.55"]
        self.assertEqual(
            get_last_release_version(parse_version, git_tag_prefix="v"),
            Version("3.55"),
        )

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_version_ignore_pre_releases(self, git_mock):
        """With ignore_pre_releases, alpha/beta/rc/dev tags are skipped."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = [
            "1",
            "2",
            "3.55a1",
            "3.56.dev1",
            "4.0.0rc1",
            "4.0.1b1",
        ]
        self.assertEqual(
            get_last_release_version(parse_version, ignore_pre_releases=True),
            Version("2"),
        )

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_version_no_non_pre_release(self, git_mock):
        """Only pre-release tags plus ignore_pre_releases returns None."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = [
            "3.55a1",
            "3.56.dev1",
            "4.0.0rc1",
            "4.0.1b1",
        ]
        self.assertIsNone(
            get_last_release_version(parse_version, ignore_pre_releases=True)
        )

    @patch("pontos.version.helper.Git", spec=Git)
    def test_get_last_release_version_tag_name(self, git_mock):
        """A tag_name pattern restricts which tags are considered."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = [
            "4.0.0rc1",
            "4.0.1b1",
        ]
        self.assertEqual(
            get_last_release_version(parse_version, tag_name="4.0.*"),
            Version("4.0.1b1"),
        )

    @patch("pontos.version.helper.Git", spec=Git)
    def test_invalid_version(self, git_mock):
        """An unparsable newest tag is skipped; the next valid one wins."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = ["1.0.0", "2.0.0", "3.55a1"]

        self.assertEqual(
            get_last_release_version(SemanticVersioningScheme.parse_version),
            SemanticVersioningScheme.parse_version("2.0.0"),
        )

    @patch("pontos.version.helper.Git", spec=Git)
    def test_success_with_invalid_version(self, git_mock):
        """An invalid tag in the middle doesn't hide a newer valid tag."""
        git_interface = git_mock.return_value
        git_interface.list_tags.return_value = [
            "1.0.0",
            "2.0.0",
            "3.55a1",
            "4.0.0",
        ]

        version = get_last_release_version(
            SemanticVersioningScheme.parse_version
        )
        self.assertEqual(
            version, SemanticVersioningScheme.parse_version("4.0.0")
        )
pontos-25.3.2/tests/version/test_main.py000066400000000000000000000247611476255566300203460ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from contextlib import redirect_stderr, redirect_stdout
from datetime import datetime
from io import StringIO

from pontos.testing import temp_directory
from pontos.version._main import VersionExitCode, main


class MainTestCase(unittest.TestCase):
    """End-to-end tests for the pontos-version CLI entry point.

    Each test runs ``main()`` inside a temporary directory containing a
    ``package.json`` as version file, then checks the ``SystemExit``
    code (a ``VersionExitCode``) and the captured stdout/stderr.
    ``main()`` always exits via ``SystemExit``, hence the pervasive
    ``assertRaises(SystemExit)`` contexts.
    """

    def test_no_command(self):
        """Running without a sub-command exits with an argparse error."""
        with self.assertRaises(SystemExit), redirect_stderr(StringIO()):
            main([])

    def test_no_project(self):
        """An empty directory (no project file) exits with NO_PROJECT."""
        with (
            temp_directory(change_into=True),
            self.assertRaises(SystemExit) as cm,
            redirect_stderr(StringIO()),
        ):
            main(["show"])

        self.assertEqual(cm.exception.code, VersionExitCode.NO_PROJECT)

    def test_update_success(self):
        """'update' rewrites the version and reports old and new value."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["update", "2.0.0"])

        self.assertEqual(
            out.getvalue(), "Updated version from 1.2.3 to 2.0.0.\n"
        )

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)

    def test_update_failure(self):
        """A project file without a version entry exits with UPDATE_ERROR."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stderr(StringIO()),
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                "{}",
                encoding="utf8",
            )
            main(["update", "2.0.0"])

        self.assertEqual(cm.exception.code, VersionExitCode.UPDATE_ERROR)

    def test_update_already_up_to_date(self):
        """Updating to the current version succeeds without a change."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["update", "1.2.3"])

        self.assertEqual(out.getvalue(), "Version is already up-to-date.\n")

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)

    def test_show(self):
        """'show' prints the current version on stdout."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["show"])

        self.assertEqual(out.getvalue(), "1.2.3\n")

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)

    def test_show_error(self):
        """An unparsable current version exits with CURRENT_VERSION_ERROR."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stderr(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "abc"}',
                encoding="utf8",
            )
            main(["show"])

        self.assertEqual(out.getvalue(), "Invalid version: 'abc'\n")

        self.assertEqual(
            cm.exception.code, VersionExitCode.CURRENT_VERSION_ERROR
        )

    def test_verify_failure(self):
        """'verify' with a mismatching version exits with VERIFY_ERROR."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stderr(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["verify", "1.2.4"])

        self.assertEqual(
            out.getvalue(),
            "Provided version 1.2.4 does not match the current version "
            f"1.2.3 in {version_file.resolve()}.\n",
        )

        self.assertEqual(cm.exception.code, VersionExitCode.VERIFY_ERROR)

    def test_verify_success(self):
        """'verify' with the matching version exits with SUCCESS."""
        with (
            temp_directory(change_into=True) as temp_dir,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["verify", "1.2.3"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)

    def test_verify_current(self):
        """'verify current' checks project consistency and succeeds."""
        with (
            temp_directory(change_into=True) as temp_dir,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["verify", "current"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)

    def test_next_invalid_current(self):
        """'next' with an unparsable current version fails accordingly."""
        with (
            redirect_stderr(StringIO()),
            temp_directory(change_into=True) as temp_dir,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.tt"}',
                encoding="utf8",
            )
            main(["next", "dev", "--versioning-scheme", "pep440"])

        self.assertEqual(
            cm.exception.code, VersionExitCode.CURRENT_VERSION_ERROR
        )

    def test_next_dev(self):
        """'next dev' prints the next PEP 440 dev version."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "dev", "--versioning-scheme", "pep440"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(out.getvalue(), "1.2.4.dev1\n")

    def test_next_patch(self):
        """'next patch' prints the next patch version."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "patch", "--versioning-scheme", "pep440"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(out.getvalue(), "1.2.4\n")

    def test_next_minor(self):
        """'next minor' prints the next minor version."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "minor", "--versioning-scheme", "pep440"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(out.getvalue(), "1.3.0\n")

    def test_next_major(self):
        """'next major' prints the next major version."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "major", "--versioning-scheme", "pep440"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(out.getvalue(), "2.0.0\n")

    def test_next_alpha(self):
        """'next alpha' prints the next semver alpha version."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "alpha", "--versioning-scheme", "semver"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(out.getvalue(), "1.2.4-alpha1\n")

    def test_next_beta(self):
        """'next beta' prints the next semver beta version."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "beta", "--versioning-scheme", "semver"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(out.getvalue(), "1.2.4-beta1\n")

    def test_next_rc(self):
        """'next rc' prints the next semver release candidate version."""
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "rc", "--versioning-scheme", "semver"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(out.getvalue(), "1.2.4-rc1\n")

    def test_next_calendar(self):
        """'next calendar' prints the CalVer version for today's date."""
        today = datetime.today()
        with (
            temp_directory(change_into=True) as temp_dir,
            redirect_stdout(StringIO()) as out,
            self.assertRaises(SystemExit) as cm,
        ):
            version_file = temp_dir / "package.json"
            version_file.write_text(
                '{"name": "foo", "version": "1.2.3"}',
                encoding="utf8",
            )
            main(["next", "calendar", "--versioning-scheme", "pep440"])

        self.assertEqual(cm.exception.code, VersionExitCode.SUCCESS)
        self.assertEqual(
            out.getvalue(), f"{today.year % 100}.{today.month}.0\n"
        )
pontos-25.3.2/tests/version/test_parser.py000066400000000000000000000007621476255566300207110ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest
from contextlib import redirect_stderr
from io import StringIO

from pontos.version._parser import parse_args


class ParserTestCase(unittest.TestCase):
    """Tests for the pontos-version command line argument parser."""

    def test_error_while_parsing(self):
        """An invalid sub-command argument makes argparse exit with code 2."""
        stderr_sink = StringIO()
        with redirect_stderr(stderr_sink):
            with self.assertRaises(SystemExit) as exit_ctx:
                parse_args(["update", "foo"])

        # argparse terminates usage errors with exit code 2
        self.assertEqual(exit_ctx.exception.code, 2)
pontos-25.3.2/tests/version/test_project.py000066400000000000000000000126751476255566300210710ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest

from pontos.testing import temp_directory, temp_python_module
from pontos.version.project import Project, ProjectError
from pontos.version.schemes import PEP440VersioningScheme


class ProjectTestCase(unittest.TestCase):
    """End-to-end checks of ``Project`` against every supported project type."""

    def _assert_version_update(self, current, new, expected_file_count):
        """Load the project from the cwd, verify its current version, apply a
        version bump and check the reported update information.

        ``expected_file_count`` is the number of files the update must touch.
        """
        project = Project(PEP440VersioningScheme)
        self.assertEqual(project.get_current_version(), current)

        update = project.update_version(new)

        self.assertEqual(update.previous, current)
        self.assertEqual(update.new, new)
        self.assertEqual(len(update.changed_files), expected_file_count)

    def test_no_project_found(self):
        """Without any project settings file a ProjectError must be raised."""
        with temp_directory(change_into=True):
            with self.assertRaisesRegex(
                ProjectError, "No project settings file found"
            ):
                Project(PEP440VersioningScheme)

    def test_python_project(self):
        """A Python project updates pyproject.toml and the version module."""
        current = PEP440VersioningScheme.parse_version("1.2.3")
        new = PEP440VersioningScheme.parse_version("1.2.4")

        module_content = f"__version__ = '{current}'"
        with temp_python_module(
            module_content, name="foo", change_into=True
        ) as temp_module:
            pyproject_toml = temp_module.parent / "pyproject.toml"
            pyproject_toml.write_text(
                f'[tool.poetry]\nversion = "{current}"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )

            # pyproject.toml plus foo.py => two changed files
            self._assert_version_update(current, new, 2)

    def test_go_project(self):
        """A Go project (go.mod marker) updates its version.go file."""
        current = PEP440VersioningScheme.parse_version("1.2.3")
        new = PEP440VersioningScheme.parse_version("1.2.4")

        with temp_directory(change_into=True) as project_dir:
            (project_dir / "go.mod").touch()
            go_version_file = project_dir / "version.go"
            go_version_file.write_text(f'var version = "{current}"')

            self._assert_version_update(current, new, 1)

    def test_javascript_project(self):
        """A JavaScript project updates the version in package.json."""
        current = PEP440VersioningScheme.parse_version("1.2.3")
        new = PEP440VersioningScheme.parse_version("1.2.4")

        with temp_directory(change_into=True) as project_dir:
            package_json = project_dir / "package.json"
            package_json.write_text(
                f'{{"name": "foo", "version": "{current}"}}',
                encoding="utf8",
            )

            self._assert_version_update(current, new, 1)

    def test_cmake_project_version(self):
        """A CMake project updates the version in CMakeLists.txt."""
        current = PEP440VersioningScheme.parse_version("1.2.3")
        new = PEP440VersioningScheme.parse_version("1.2.4")

        with temp_directory(change_into=True) as project_dir:
            cmake_lists = project_dir / "CMakeLists.txt"
            cmake_lists.write_text("project(VERSION 1.2.3)", encoding="utf8")

            self._assert_version_update(current, new, 1)

    def test_all(self):
        """With every project type present, all version files are updated."""
        current = PEP440VersioningScheme.parse_version("1.2.3")
        new = PEP440VersioningScheme.parse_version("1.2.4")

        module_content = f"__version__ = '{current}'"
        with temp_python_module(
            module_content, name="foo", change_into=True
        ) as temp_module:
            project_dir = temp_module.parent

            pyproject_toml = project_dir / "pyproject.toml"
            pyproject_toml.write_text(
                f'[tool.poetry]\nversion = "{current}"\n'
                '[tool.pontos.version]\nversion-module-file = "foo.py"',
                encoding="utf8",
            )

            (project_dir / "go.mod").touch()
            go_version_file = project_dir / "version.go"
            go_version_file.write_text(f'var version = "{current}"')

            package_json = project_dir / "package.json"
            package_json.write_text(
                f'{{"name": "foo", "version": "{current}"}}',
                encoding="utf8",
            )

            cmake_lists = project_dir / "CMakeLists.txt"
            cmake_lists.write_text(
                "project(VERSION 1.2.3)", encoding="utf8"
            )

            # pyproject.toml, foo.py, version.go, package.json, CMakeLists.txt
            self._assert_version_update(current, new, 5)
pontos-25.3.2/tests/version/test_version.py000066400000000000000000000022051476255566300210740ustar00rootroot00000000000000# Copyright (C) 2023 Greenbone AG
#
# SPDX-License-Identifier: GPL-3.0-or-later
#

import unittest

from pontos.version.schemes._pep440 import PEP440Version
from pontos.version.schemes._semantic import SemanticVersion


class VersionTestCase(unittest.TestCase):
    """Equality semantics of PEP440 and semantic version objects."""

    def test_equal(self):
        """Versions compare by value; plain strings never compare equal."""
        self.assertEqual(PEP440Version("22.2.2"), PEP440Version("22.2.2"))
        self.assertNotEqual(PEP440Version("22.2.1"), PEP440Version("22.2.2"))
        self.assertNotEqual(PEP440Version("22.2.2"), "22.2.2")
        self.assertNotEqual(
            SemanticVersion("22.2.1"), SemanticVersion("22.2.2")
        )
        self.assertNotEqual(SemanticVersion("22.2.2"), "22.2.2")

    def test_equal_other(self):
        """A PEP440 version equals the semantic version with the same value."""
        pep_version = PEP440Version("22.2.2")
        semantic_version = SemanticVersion("22.2.2")
        self.assertEqual(pep_version, semantic_version)

    def test_equal_other_other_way_around(self):
        """Equality across schemes also holds with the operands swapped."""
        semantic_version = SemanticVersion("22.2.2")
        pep_version = PEP440Version("22.2.2")
        self.assertEqual(semantic_version, pep_version)

    def test_equal_raises(self):
        """Comparing a version with an unsupported type raises ValueError."""
        for version in (SemanticVersion("22.2.2"), PEP440Version("22.2.2")):
            with self.assertRaises(ValueError):
                self.assertNotEqual(version, 22)