pax_global_header00006660000000000000000000000064147567043520014527gustar00rootroot0000000000000052 comment=26bb37cfab71a5a372e3db0f48a6eac57519a4a6 python-semantic-release-9.21.0/000077500000000000000000000000001475670435200163605ustar00rootroot00000000000000python-semantic-release-9.21.0/.dockerignore000066400000000000000000000001021475670435200210250ustar00rootroot00000000000000tests build dist docs htmlcov .git .mypy_cache .pytest_cache .tox python-semantic-release-9.21.0/.gitattributes000066400000000000000000000001211475670435200212450ustar00rootroot00000000000000# https://help.github.com/articles/dealing-with-line-endings/ * text=auto eol=lf python-semantic-release-9.21.0/.github/000077500000000000000000000000001475670435200177205ustar00rootroot00000000000000python-semantic-release-9.21.0/.github/ISSUE_TEMPLATE/000077500000000000000000000000001475670435200221035ustar00rootroot00000000000000python-semantic-release-9.21.0/.github/ISSUE_TEMPLATE/bug-report.md000066400000000000000000000037761475670435200245300ustar00rootroot00000000000000--- name: Bug Report about: Something isn't working as expected labels: - bug - triage --- ## Bug Report ### Description ### Expected behavior ### Actual behavior ### Environment - **Operating System (w/ version):** - **Python version:** - **Pip version:** - **Semantic-release version:** - **Build tool (w/ version):**
pip freeze ```log ```

git log --oneline --decorate --graph --all -n 50 ```log ```
### Configuration
Semantic Release Configuration ```toml ```

Build System Configuration ```toml ```

GitHub Actions Job Definition ```yaml ```
### Execution Log
semantic-release -vv command ```log ```
### Additional context python-semantic-release-9.21.0/.github/ISSUE_TEMPLATE/documentation.md000066400000000000000000000004461475670435200253020ustar00rootroot00000000000000--- name: Documentation about: I found an error or gap in the docs labels: - docs - triage --- ## Documentation Error ### Description python-semantic-release-9.21.0/.github/ISSUE_TEMPLATE/feature_request.md000066400000000000000000000006761475670435200256410ustar00rootroot00000000000000--- name: Feature Request about: Suggest a new idea labels: - feature - triage --- ## Feature Request ### Description ### Use cases ### Possible implementation ### Alternative solutions python-semantic-release-9.21.0/.github/ISSUE_TEMPLATE/question.md000066400000000000000000000014711475670435200242770ustar00rootroot00000000000000--- name: Question about: I have a question about Python Semantic Release labels: - question - triage --- ## Question ### Configuration
Semantic Release Configuration ```toml ```
## Additional context
git log --oneline --decorate --graph --all -n 50 ```log ```
python-semantic-release-9.21.0/.github/PULL_REQUEST_TEMPLATE.md000066400000000000000000000031411475670435200235200ustar00rootroot00000000000000 ## Purpose ## Rationale ## How did you test? ## How to Verify --- ## PR Completion Checklist - [ ] Reviewed & followed the [Contributor Guidelines](https://python-semantic-release.readthedocs.io/en/latest/contributing.html) - [ ] Changes Implemented & Validation pipeline succeeds - [ ] Commits follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) standard and are separated into the proper commit type and scope (recommended order: test, build, feat/fix, docs) - [ ] Appropriate Unit tests added/updated - [ ] Appropriate End-to-End tests added/updated - [ ] Appropriate Documentation added/updated and syntax validated for sphinx build (see Contributor Guidelines) python-semantic-release-9.21.0/.github/changed-files-spec.yml000066400000000000000000000003631475670435200240660ustar00rootroot00000000000000--- build: - MANIFEST.in - Dockerfile - .dockerignore - scripts/** docs: - docs/** - README.rst - AUTHORS.rst - CONTRIBUTING.rst - CHANGELOG.rst src: - src/** - pyproject.toml tests: - tests/** python-semantic-release-9.21.0/.github/dependabot.yaml000066400000000000000000000017531475670435200227170ustar00rootroot00000000000000--- version: 2 updates: - package-ecosystem: "pip" directory: "/" schedule: interval: "weekly" day: "monday" time: "18:00" commit-message: prefix: "build" include: "scope" labels: - dependencies - dependabot open-pull-requests-limit: 10 rebase-strategy: auto versioning-strategy: "increase-if-necessary" # Maintain dependencies for Docker (ie our GitHub Action) - package-ecosystem: "docker" directory: "/" schedule: interval: "monthly" labels: - dependencies - dependabot rebase-strategy: auto commit-message: prefix: "build" include: "scope" # (deps) - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" day: "monday" time: "18:00" commit-message: prefix: 
"ci" labels: - dependencies - dependabot rebase-strategy: auto groups: github-actions: patterns: - "*" python-semantic-release-9.21.0/.github/workflows/000077500000000000000000000000001475670435200217555ustar00rootroot00000000000000python-semantic-release-9.21.0/.github/workflows/ci.yml000066400000000000000000000072761475670435200231070ustar00rootroot00000000000000name: CI on: pull_request: types: [opened, synchronize, reopened, ready_for_review] branches: # Target branches - master # default token permissions = none permissions: {} # If a new push is made to the branch, cancel the previous run concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true jobs: commitlint: # condition: Execute IFF it is protected branch update, or a PR that is NOT in a draft state if: ${{ github.event_name != 'pull_request' || !github.event.pull_request.draft }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: wagoid/commitlint-github-action@v6 eval-changes: name: Evaluate changes # condition: Execute IFF it is protected branch update, or a PR that is NOT in a draft state if: ${{ github.event_name != 'pull_request' || !github.event.pull_request.draft }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 100 - name: Evaluate | Check common file types for changes id: core-changed-files uses: tj-actions/changed-files@v45.0.7 with: files_yaml_from_source_file: .github/changed-files-spec.yml - name: Evaluate | Check specific file types for changes id: ci-changed-files uses: tj-actions/changed-files@v45.0.7 with: files_yaml: | ci: - .github/workflows/ci.yml - .github/workflows/validate.yml - name: Evaluate | Detect if any of the combinations of file sets have changed id: all-changes run: | printf '%s\n' "any_changed=false" >> $GITHUB_OUTPUT if [ "${{ steps.core-changed-files.outputs.build_any_changed }}" == "true" ] || \ [ "${{ steps.ci-changed-files.outputs.ci_any_changed }}" == "true" ] || \ [ "${{ 
steps.core-changed-files.outputs.docs_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.src_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.tests_any_changed }}" == "true" ]; then printf '%s\n' "any_changed=true" >> $GITHUB_OUTPUT fi outputs: # essentially casts the string output to a boolean for GitHub any-file-changes: ${{ steps.all-changes.outputs.any_changed }} build-changes: ${{ steps.core-changed-files.outputs.build_any_changed }} ci-changes: ${{ steps.ci-changed-files.outputs.ci_any_changed }} doc-changes: ${{ steps.core-changed-files.outputs.docs_any_changed }} src-changes: ${{ steps.core-changed-files.outputs.src_any_changed }} test-changes: ${{ steps.core-changed-files.outputs.tests_any_changed }} validate: needs: eval-changes uses: ./.github/workflows/validate.yml with: python-versions-linux: '["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]' # Since the test suite takes ~4 minutes to complete on windows, and windows is billed higher # we are only going to run it on the oldest version of python we support. The older version # will be the most likely area to fail as newer minor versions maintain compatibility. 
python-versions-windows: '["3.8"]' files-changed: ${{ needs.eval-changes.outputs.any-file-changes }} build-files-changed: ${{ needs.eval-changes.outputs.build-changes }} ci-files-changed: ${{ needs.eval-changes.outputs.ci-changes }} doc-files-changed: ${{ needs.eval-changes.outputs.doc-changes }} src-files-changed: ${{ needs.eval-changes.outputs.src-changes }} test-files-changed: ${{ needs.eval-changes.outputs.test-changes }} permissions: {} secrets: {} python-semantic-release-9.21.0/.github/workflows/cicd.yml000066400000000000000000000200131475670435200233760ustar00rootroot00000000000000--- name: CI/CD on: push: branches: - master - release/** # default token permissions = none permissions: {} jobs: eval-changes: name: Evaluate changes runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 with: fetch-depth: 100 # Must at least retrieve a set of commits to compare changes # primarily because of any 'Rebase and Merge' PR action in GitHub - name: Evaluate | Check common file types for changes id: core-changed-files uses: tj-actions/changed-files@v45.0.7 with: base_sha: ${{ github.event.push.before }} files_yaml_from_source_file: .github/changed-files-spec.yml - name: Evaluate | Check specific file types for changes id: ci-changed-files uses: tj-actions/changed-files@v45.0.7 with: base_sha: ${{ github.event.push.before }} files_yaml: | ci: - .github/workflows/cicd.yml - .github/workflows/validate.yml - name: Evaluate | Detect if any of the combinations of file sets have changed id: all-changes run: | printf '%s\n' "any_changed=false" >> $GITHUB_OUTPUT if [ "${{ steps.core-changed-files.outputs.build_any_changed }}" == "true" ] || \ [ "${{ steps.ci-changed-files.outputs.ci_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.docs_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.src_any_changed }}" == "true" ] || \ [ "${{ steps.core-changed-files.outputs.tests_any_changed }}" == "true" ]; then printf '%s\n' 
"any_changed=true" >> $GITHUB_OUTPUT fi outputs: any-file-changes: ${{ steps.all-changes.outputs.any_changed }} build-changes: ${{ steps.core-changed-files.outputs.build_any_changed }} ci-changes: ${{ steps.ci-changed-files.outputs.ci_any_changed }} doc-changes: ${{ steps.core-changed-files.outputs.docs_any_changed }} src-changes: ${{ steps.core-changed-files.outputs.src_any_changed }} test-changes: ${{ steps.core-changed-files.outputs.tests_any_changed }} validate: uses: ./.github/workflows/validate.yml needs: eval-changes concurrency: group: ${{ github.workflow }}-validate-${{ github.ref_name }} cancel-in-progress: true with: python-versions-linux: '["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]' python-versions-windows: '["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]' files-changed: ${{ needs.eval-changes.outputs.any-file-changes }} build-files-changed: ${{ needs.eval-changes.outputs.build-changes }} ci-files-changed: ${{ needs.eval-changes.outputs.ci-changes }} doc-files-changed: ${{ needs.eval-changes.outputs.doc-changes }} src-files-changed: ${{ needs.eval-changes.outputs.src-changes }} test-files-changed: ${{ needs.eval-changes.outputs.test-changes }} permissions: {} secrets: {} release: name: Semantic Release runs-on: ubuntu-latest needs: validate if: ${{ needs.validate.outputs.new-release-detected == 'true' }} concurrency: group: ${{ github.workflow }}-release-${{ github.ref_name }} cancel-in-progress: false permissions: contents: write env: GITHUB_ACTIONS_AUTHOR_NAME: github-actions GITHUB_ACTIONS_AUTHOR_EMAIL: actions@users.noreply.github.com steps: # Note: We checkout the repository at the branch that triggered the workflow # with the entire history to ensure to match PSR's release branch detection # and history evaluation. # However, we forcefully reset the branch to the workflow sha because it is # possible that the branch was updated while the workflow was running. This # prevents accidentally releasing un-evaluated changes. 
- name: Setup | Checkout Repository on Release Branch uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} fetch-depth: 0 - name: Setup | Force release branch to be at workflow sha run: | git reset --hard ${{ github.sha }} - name: Setup | Download Build Artifacts uses: actions/download-artifact@v4 id: artifact-download with: name: ${{ needs.validate.outputs.distribution-artifacts }} path: dist - name: Release | Bump Version in Docs if: needs.validate.outputs.new-release-is-prerelease == 'false' env: NEW_VERSION: ${{ needs.validate.outputs.new-release-version }} NEW_RELEASE_TAG: ${{ needs.validate.outputs.new-release-tag }} run: | python -m scripts.bump_version_in_docs git add docs/* - name: Evaluate | Verify upstream has NOT changed # Last chance to abort before causing an error as another PR/push was applied to the upstream branch # while this workflow was running. This is important because we are committing a version change shell: bash run: bash .github/workflows/verify_upstream.sh - name: Release | Python Semantic Release id: release uses: python-semantic-release/python-semantic-release@v9.20.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} root_options: "-v" build: false - name: Release | Add distribution artifacts to GitHub Release Assets uses: python-semantic-release/publish-action@v9.20.0 if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ steps.release.outputs.tag }} - name: Release | Update Minor Release Tag Reference if: steps.release.outputs.released == 'true' && steps.release.outputs.is_prerelease == 'false' env: FULL_VERSION_TAG: ${{ steps.release.outputs.tag }} GIT_COMMITTER_NAME: ${{ env.GITHUB_ACTIONS_AUTHOR_NAME }} GIT_COMMITTER_EMAIL: ${{ env.GITHUB_ACTIONS_AUTHOR_EMAIL }} run: | MINOR_VERSION_TAG="$(echo "$FULL_VERSION_TAG" | cut -d. 
-f1,2)" git tag --force --annotate "$MINOR_VERSION_TAG" "${FULL_VERSION_TAG}^{}" -m "$MINOR_VERSION_TAG" git push -u origin "$MINOR_VERSION_TAG" --force - name: Release | Update Major Release Tag Reference if: steps.release.outputs.released == 'true' && steps.release.outputs.is_prerelease == 'false' env: FULL_VERSION_TAG: ${{ steps.release.outputs.tag }} GIT_COMMITTER_NAME: ${{ env.GITHUB_ACTIONS_AUTHOR_NAME }} GIT_COMMITTER_EMAIL: ${{ env.GITHUB_ACTIONS_AUTHOR_EMAIL }} run: | MAJOR_VERSION_TAG="$(echo "$FULL_VERSION_TAG" | cut -d. -f1)" git tag --force --annotate "$MAJOR_VERSION_TAG" "${FULL_VERSION_TAG}^{}" -m "$MAJOR_VERSION_TAG" git push -u origin "$MAJOR_VERSION_TAG" --force outputs: released: ${{ steps.release.outputs.released || 'false' }} new-release-version: ${{ steps.release.outputs.version }} new-release-tag: ${{ steps.release.outputs.tag }} deploy: name: Deploy runs-on: ubuntu-latest if: ${{ needs.release.outputs.released == 'true' && github.repository == 'python-semantic-release/python-semantic-release' }} needs: - validate - release environment: name: pypi url: https://pypi.org/project/python-semantic-release/ permissions: # https://docs.github.com/en/rest/overview/permissions-required-for-github-apps?apiVersion=2022-11-28#metadata id-token: write # needed for PyPI upload steps: - name: Setup | Download Build Artifacts uses: actions/download-artifact@v4 id: artifact-download with: name: ${{ needs.validate.outputs.distribution-artifacts }} path: dist # see https://docs.pypi.org/trusted-publishers/ - name: Publish package distributions to PyPI id: pypi-publish uses: pypa/gh-action-pypi-publish@v1.12.4 with: packages-dir: dist print-hash: true verbose: true python-semantic-release-9.21.0/.github/workflows/manual.yml000066400000000000000000000104451475670435200237610ustar00rootroot00000000000000name: CI (Manual) on: # Enable execution directly from Actions page workflow_dispatch: inputs: linux: description: 'Test on Linux?' 
type: boolean required: true default: true windows: description: 'Test on Windows?' type: boolean required: true default: true python3-13: description: 'Test Python 3.13?' type: boolean required: true default: true python3-12: description: 'Test Python 3.12?' type: boolean required: true default: true python3-11: description: 'Test Python 3.11?' type: boolean required: true default: true python3-10: description: 'Test Python 3.10?' type: boolean required: true default: true python3-9: description: 'Test Python 3.9?' type: boolean required: true default: true python3-8: description: 'Test Python 3.8?' type: boolean required: true default: true # default token permissions = none permissions: {} env: COMMON_PYTHON_VERSION: '3.11' jobs: eval-input: name: Evaluate inputs runs-on: ubuntu-latest steps: - name: Setup | Install Python ${{ env.COMMON_PYTHON_VERSION }} uses: actions/setup-python@v5 with: python-version: ${{ env.COMMON_PYTHON_VERSION }} - name: Setup | Write file uses: DamianReeves/write-file-action@v1.3 with: path: .github/manual_eval_input.py write-mode: overwrite contents: | import json, os version_list = list(filter(None, [ "3.8" if str(os.getenv("INPUT_PY3_8", False)).lower() == str(True).lower() else None, "3.9" if str(os.getenv("INPUT_PY3_9", False)).lower() == str(True).lower() else None, "3.10" if str(os.getenv("INPUT_PY3_10", False)).lower() == str(True).lower() else None, "3.11" if str(os.getenv("INPUT_PY3_11", False)).lower() == str(True).lower() else None, "3.12" if str(os.getenv("INPUT_PY3_12", False)).lower() == str(True).lower() else None, "3.13" if str(os.getenv("INPUT_PY3_13", False)).lower() == str(True).lower() else None, ])) linux_versions = ( version_list if str(os.getenv("INPUT_LINUX", False)).lower() == str(True).lower() else [] ) windows_versions = ( version_list if str(os.getenv("INPUT_WINDOWS", False)).lower() == str(True).lower() else [] ) print(f"PYTHON_VERSIONS_LINUX={json.dumps(linux_versions)}") 
print(f"PYTHON_VERSIONS_WINDOWS={json.dumps(windows_versions)}") - name: Evaluate | Generate Test Matrix id: test-matrix env: INPUT_PY3_8: ${{ inputs.python3-8 }} INPUT_PY3_9: ${{ inputs.python3-9 }} INPUT_PY3_10: ${{ inputs.python3-10 }} INPUT_PY3_11: ${{ inputs.python3-11 }} INPUT_PY3_12: ${{ inputs.python3-12 }} INPUT_PY3_13: ${{ inputs.python3-13 }} INPUT_LINUX: ${{ inputs.linux }} INPUT_WINDOWS: ${{ inputs.windows }} run: | if ! vars="$(python3 .github/manual_eval_input.py)"; then printf '%s\n' "::error::Failed to evaluate input" exit 1 fi printf '%s\n' "$vars" printf '%s\n' "$vars" >> $GITHUB_OUTPUT outputs: python-versions-linux: ${{ steps.test-matrix.outputs.PYTHON_VERSIONS_LINUX }} python-versions-windows: ${{ steps.test-matrix.outputs.PYTHON_VERSIONS_WINDOWS }} validate: needs: eval-input uses: ./.github/workflows/validate.yml with: python-versions-linux: ${{ needs.eval-input.outputs.python-versions-linux }} python-versions-windows: ${{ needs.eval-input.outputs.python-versions-windows }} # There is no way to check for file changes on a manual workflow so # we just assume everything has changed build-files-changed: true ci-files-changed: true doc-files-changed: true src-files-changed: true test-files-changed: true files-changed: true permissions: {} secrets: {} python-semantic-release-9.21.0/.github/workflows/stale.yml000066400000000000000000000126231475670435200236140ustar00rootroot00000000000000name: 'Stale Bot' on: schedule: # Execute Daily at 7:15 AM UTC - cron: '15 7 * * *' # Default token permissions = None permissions: {} jobs: stale: runs-on: ubuntu-latest permissions: contents: read issues: write pull-requests: write actions: write # required to delete/update cache env: STALE_ISSUE_WARNING_DAYS: 90 STALE_ISSUE_CLOSURE_DAYS: 7 STALE_PR_WARNING_DAYS: 60 STALE_PR_CLOSURE_DAYS: 10 UNRESPONSIVE_WARNING_DAYS: 14 UNRESPONSIVE_CLOSURE_DAYS: 7 REMINDER_WINDOW: 60 OPERATIONS_RATE_LIMIT: 330 # 1000 api/hr / 3 jobs steps: - name: Stale Issues/PRs uses: 
actions/stale@v9 with: # default: 30, GitHub Actions API Rate limit is 1000/hr operations-per-run: ${{ env.OPERATIONS_RATE_LIMIT }} # exempt-all-milestones: false (default) # exempt-all-assignees: false (default) stale-issue-label: stale days-before-issue-stale: ${{ env.STALE_ISSUE_WARNING_DAYS }} days-before-issue-close: ${{ env.STALE_ISSUE_CLOSURE_DAYS }} exempt-issue-labels: confirmed, help-wanted, info stale-issue-message: > This issue is stale because it has not been confirmed or planned by the maintainers and has been open ${{ env.STALE_ISSUE_WARNING_DAYS }} days with no recent activity. It will be closed in ${{ env.STALE_ISSUE_CLOSURE_DAYS }} days, if no further activity occurs. Thank you for your contributions. close-issue-message: > This issue was closed due to lack of activity. # PR Configurations stale-pr-label: stale days-before-pr-stale: ${{ env.STALE_PR_WARNING_DAYS }} days-before-pr-close: ${{ env.STALE_PR_CLOSURE_DAYS }} exempt-pr-labels: confirmed, dependabot stale-pr-message: > This PR is stale because it has not been confirmed or considered ready for merge by the maintainers but has been open ${{ env.STALE_PR_WARNING_DAYS }} days with no recent activity. It will be closed in ${{ env.STALE_PR_CLOSURE_DAYS }} days, if no further activity occurs. Please make sure to add the proper testing, docs, and descriptions of changes before your PR can be merged. Thank you for your contributions. close-pr-message: > This PR was closed due to lack of activity. - name: Unresponsive Issues/PRs # Closes issues rapidly when submitter is unresponsive. The timer is initiated # by maintainer by placing the awaiting-reply label on the issue or PR. From # that point the submitter has 14 days before a reminder/warning is given. If # no response has been received within 3 weeks, the issue is closed. There are # no exemptions besides removing the awaiting-reply label. 
uses: actions/stale@v9 with: # GitHub Actions API Rate limit is 1000/hr operations-per-run: ${{ env.OPERATIONS_RATE_LIMIT }} only-labels: awaiting-reply stale-issue-label: unresponsive stale-pr-label: unresponsive remove-stale-when-updated: awaiting-reply days-before-stale: ${{ env.UNRESPONSIVE_WARNING_DAYS }} days-before-close: ${{ env.UNRESPONSIVE_CLOSURE_DAYS }} stale-issue-message: > This issue has not received a response in ${{ env.UNRESPONSIVE_WARNING_DAYS }} days. If no response is received in ${{ env.UNRESPONSIVE_CLOSURE_DAYS }} days, it will be closed. We look forward to hearing from you. close-issue-message: > This issue was closed because no response was received. stale-pr-message: > This PR has not received a response in ${{ env.UNRESPONSIVE_WARNING_DAYS }} days. If no response is received in ${{ env.UNRESPONSIVE_CLOSURE_DAYS }} days, it will be closed. We look forward to hearing from you. close-pr-message: > This PR was closed because no response was received. - name: Reminders on Confirmed Issues/PRs # Posts a reminder when confirmed issues are not updated in a timely manner. # The timer is initiated by a maintainer by placing the confirmed label on # the issue or PR (which prevents stale closure), however, to prevent it being # forgotten completely, this job will post a reminder message to the maintainers # No closures will occur and there are no exemptions besides removing the confirmed # label. uses: actions/stale@v9 with: # GitHub Actions API Rate limit is 1000/hr operations-per-run: ${{ env.OPERATIONS_RATE_LIMIT }} only-labels: confirmed stale-issue-label: needs-update stale-pr-label: needs-update days-before-stale: ${{ env.REMINDER_WINDOW }} days-before-close: -1 # never close stale-issue-message: > It has been ${{ env.REMINDER_WINDOW }} days since the last update on this confirmed issue. @python-semantic-release/team can you provide an update on the status of this issue? 
stale-pr-message: > It has been ${{ env.REMINDER_WINDOW }} days since the last update on this confirmed PR. @python-semantic-release/team can you provide an update on the status of this PR? python-semantic-release-9.21.0/.github/workflows/validate.yml000066400000000000000000000361171475670435200243010ustar00rootroot00000000000000--- name: Validation Pipeline on: # Enable workflow as callable from another workflow workflow_call: inputs: python-versions-linux: description: 'Python versions to test on Linux (JSON array)' required: true type: string python-versions-windows: description: 'Python versions to test on Windows (JSON array)' required: true type: string files-changed: description: 'Boolean string result for if any files have changed' type: string required: false default: 'false' build-files-changed: description: 'Boolean string result for if build files have changed' type: string required: false default: 'false' ci-files-changed: description: 'Boolean string result for if CI files have changed' type: string required: false default: 'false' doc-files-changed: description: 'Boolean string result for if documentation files have changed' type: string required: false default: 'false' src-files-changed: description: 'Boolean string result for if source files have changed' type: string required: false default: 'false' test-files-changed: description: 'Boolean string result for if test files have changed' type: string required: false default: 'false' outputs: new-release-detected: description: Boolean string result for if new release is available value: ${{ jobs.build.outputs.new-release-detected }} new-release-version: description: Version string for the new release value: ${{ jobs.build.outputs.new-release-version }} new-release-tag: description: Tag string for the new release value: ${{ jobs.build.outputs.new-release-tag }} new-release-is-prerelease: description: Boolean string result for if new release is a pre-release value: ${{ 
jobs.build.outputs.new-release-is-prerelease }} distribution-artifacts: description: Artifact Download name for the distribution artifacts value: ${{ jobs.build.outputs.distribution-artifacts }} # secrets: none required ATT # set default Token permissions = none permissions: {} env: LOWEST_PYTHON_VERSION: '3.8' COMMON_PYTHON_VERSION: '3.11' jobs: build: name: Build runs-on: ubuntu-latest if: ${{ inputs.build-files-changed == 'true' || inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} steps: - name: Setup | Checkout Repository at workflow sha uses: actions/checkout@v4 with: ref: ${{ github.sha }} fetch-depth: 0 - name: Setup | Force correct release branch on workflow sha run: | git checkout -B ${{ github.ref_name }} - name: Setup | Install Python ${{ env.COMMON_PYTHON_VERSION }} uses: actions/setup-python@v5 with: python-version: ${{ env.COMMON_PYTHON_VERSION }} cache: 'pip' - name: Setup | Install dependencies run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[build] - name: Build | Build next version artifacts id: version uses: python-semantic-release/python-semantic-release@v9.20.0 with: github_token: "" root_options: "-v" build: true changelog: true commit: false push: false tag: false vcs_release: false - name: Build | Annotate next version if: steps.version.outputs.released == 'true' run: | printf '%s\n' "::notice::Next release will be '${{ steps.version.outputs.tag }}'" - name: Build | Create non-versioned distribution artifact if: steps.version.outputs.released == 'false' run: python -m build . 
- name: Build | Set distribution artifact variables id: build run: | printf '%s\n' "dist_dir=dist/*" >> $GITHUB_OUTPUT printf '%s\n' "artifacts_name=dist" >> $GITHUB_OUTPUT - name: Upload | Distribution Artifacts uses: actions/upload-artifact@v4 with: name: ${{ steps.build.outputs.artifacts_name }} path: ${{ steps.build.outputs.dist_dir }} if-no-files-found: error retention-days: 2 outputs: new-release-detected: ${{ steps.version.outputs.released }} new-release-version: ${{ steps.version.outputs.version }} new-release-tag: ${{ steps.version.outputs.tag }} new-release-is-prerelease: ${{ steps.version.outputs.is_prerelease }} distribution-artifacts: ${{ steps.build.outputs.artifacts_name }} unit-test: name: Unit Tests if: ${{ inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} runs-on: ubuntu-latest steps: - name: Setup | Checkout Repository uses: actions/checkout@v4 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ env.LOWEST_PYTHON_VERSION }} uses: actions/setup-python@v5 with: python-version: ${{ env.LOWEST_PYTHON_VERSION }} cache: 'pip' - name: Setup | Install dependencies run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[test] pip install pytest-github-actions-annotate-failures - name: Test | Run pytest -m unit --comprehensive id: tests run: | pytest \ -vv \ -nauto \ -m unit \ --comprehensive \ --cov=semantic_release \ --cov-context=test \ --cov-report=term-missing \ --cov-fail-under=60 \ --junit-xml=tests/reports/pytest-results.xml - name: Report | Upload Test Results uses: mikepenz/action-junit-report@v5.3.0 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml annotate_only: true test-linux: name: Python ${{ matrix.python-version }} on ${{ matrix.os }} E2E tests runs-on: ${{ matrix.os }} needs: - build - unit-test if: ${{ inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || 
inputs.ci-files-changed == 'true' }} strategy: matrix: python-version: ${{ fromJson(inputs.python-versions-linux) }} os: - ubuntu-latest steps: - name: Setup | Checkout Repository uses: actions/checkout@v4 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' - name: Setup | Download Distribution Artifacts uses: actions/download-artifact@v4 with: name: ${{ needs.build.outputs.distribution-artifacts }} path: ./dist - name: Setup | Install dependencies id: install # To ensure we are testing our installed package (not the src code), we must # uninstall the editable install (symlink) first then install the distribution artifact. # Lastly, we ask python to give us the installation location of our distribution artifact # so that we can use it in the pytest command for coverage run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[test] pip install pytest-github-actions-annotate-failures pip uninstall -y python-semantic-release pip install dist/python_semantic_release-*.whl python -c 'import pathlib, semantic_release; print(f"PKG_INSTALLED_DIR={pathlib.Path(semantic_release.__file__).resolve().parent}")' >> $GITHUB_OUTPUT - name: Test | Run pytest -m e2e --comprehensive id: tests run: | pytest \ -vv \ -nauto \ -m e2e \ --comprehensive \ --cov=${{ steps.install.outputs.PKG_INSTALLED_DIR }} \ --cov-context=test \ --cov-report=term-missing \ --cov-fail-under=70 \ --junit-xml=tests/reports/pytest-results.xml - name: Report | Upload Cached Repos on Failure uses: actions/upload-artifact@v4 if: ${{ failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('cached-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: .pytest_cache/d/psr-* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Tested Repos on Failure uses: actions/upload-artifact@v4 if: ${{ 
failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('tested-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: /tmp/pytest-of-runner/pytest-current/* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Test Results uses: mikepenz/action-junit-report@v5.3.0 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: report_paths: ./tests/reports/*.xml annotate_only: true test-windows: name: Python ${{ matrix.python-version }} on ${{ matrix.os }} E2E tests runs-on: ${{ matrix.os }} needs: - build - unit-test if: ${{ inputs.src-files-changed == 'true' || inputs.test-files-changed == 'true' || inputs.ci-files-changed == 'true' }} strategy: matrix: python-version: ${{ fromJson(inputs.python-versions-windows) }} os: [windows-latest] steps: - name: Setup | Checkout Repository uses: actions/checkout@v4 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: 'pip' - name: Setup | Download Distribution Artifacts uses: actions/download-artifact@v4 with: name: ${{ needs.build.outputs.distribution-artifacts }} path: dist - name: Setup | Install dependencies id: install # To ensure we are testing our installed package (not the src code), we must # uninstall the editable install (symlink) first then install the distribution artifact. 
# Lastly, we ask python to give us the installation location of our distribution artifact # so that we can use it in the pytest command for coverage shell: pwsh run: | $ErrorActionPreference = 'stop' python -m pip install --upgrade pip setuptools wheel pip install -e .[test] pip install pytest-github-actions-annotate-failures pip uninstall -y python-semantic-release $psrWheelFile = Get-ChildItem dist\python_semantic_release-*.whl -File | Select-Object -Index 0 pip install "$psrWheelFile" python -c 'import pathlib, semantic_release; print(f"PKG_INSTALLED_DIR={pathlib.Path(semantic_release.__file__).resolve().parent}")' | Tee-Object -Variable cmdOutput echo $cmdOutput >> $env:GITHUB_OUTPUT - name: Test | Run pytest -m e2e id: tests shell: pwsh # env: # Required for GitPython to work on Windows because of getpass.getuser() # USERNAME: "runneradmin" # Because GHA is currently broken on Windows to pass these varables, we do it manually run: | $env:USERNAME = "runneradmin" pytest ` -vv ` -nauto ` -m e2e ` `--cov=${{ steps.install.outputs.PKG_INSTALLED_DIR }} ` `--cov-context=test ` `--cov-report=term-missing ` `--junit-xml=tests/reports/pytest-results.xml - name: Report | Upload Cached Repos on Failure uses: actions/upload-artifact@v4 if: ${{ failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('cached-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: .pytest_cache/d/psr-* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Tested Repos on Failure uses: actions/upload-artifact@v4 if: ${{ failure() && steps.tests.outcome == 'failure' }} with: name: ${{ format('tested-repos-{0}-{1}', matrix.os, matrix.python-version) }} path: ~/AppData/Local/Temp/pytest-of-runneradmin/pytest-current/* include-hidden-files: true if-no-files-found: error retention-days: 1 - name: Report | Upload Test Results uses: mikepenz/action-junit-report@v5.3.0 if: ${{ always() && steps.tests.outcome != 'skipped' }} with: 
report_paths: ./tests/reports/*.xml annotate_only: true lint: name: Lint if: ${{ inputs.files-changed == 'true' }} runs-on: ubuntu-latest steps: - name: Setup | Checkout Repository uses: actions/checkout@v4 with: ref: ${{ github.sha }} fetch-depth: 1 - name: Setup | Install Python ${{ env.COMMON_PYTHON_VERSION }} uses: actions/setup-python@v5 with: python-version: ${{ env.COMMON_PYTHON_VERSION }} cache: 'pip' - name: Setup | Install dependencies run: | python -m pip install --upgrade pip setuptools wheel pip install -e .[dev,mypy,test] # needs test because we run mypy over the tests as well and without the dependencies # mypy will throw import errors - name: Lint | Ruff Evaluation id: lint run: | ruff check \ --config pyproject.toml \ --output-format=full \ --exit-non-zero-on-fix - name: Type-Check | MyPy Evaluation id: type-check if: ${{ always() && steps.lint.outcome != 'skipped' }} run: | mypy . - name: Format-Check | Ruff Evaluation id: format-check if: ${{ always() && steps.type-check.outcome != 'skipped' }} run: | ruff format --check --config pyproject.toml python-semantic-release-9.21.0/.github/workflows/verify_upstream.sh000066400000000000000000000017421475670435200255410ustar00rootroot00000000000000#!/bin/bash set -eu +o pipefail # Example output of `git status -sb`: # ## master...origin/master [behind 1] # M .github/workflows/verify_upstream.sh UPSTREAM_BRANCH_NAME="$(git status -sb | head -n 1 | cut -d' ' -f2 | grep -E '\.{3}' | cut -d'.' -f4)" printf '%s\n' "Upstream branch name: $UPSTREAM_BRANCH_NAME" set -o pipefail if [ -z "$UPSTREAM_BRANCH_NAME" ]; then printf >&2 '%s\n' "::error::Unable to determine upstream branch name!" exit 1 fi git fetch "${UPSTREAM_BRANCH_NAME%%/*}" if ! UPSTREAM_SHA="$(git rev-parse "$UPSTREAM_BRANCH_NAME")"; then printf >&2 '%s\n' "::error::Unable to determine upstream branch sha!" 
exit 1 fi HEAD_SHA="$(git rev-parse HEAD)" if [ "$HEAD_SHA" != "$UPSTREAM_SHA" ]; then printf >&2 '%s\n' "[HEAD SHA] $HEAD_SHA != $UPSTREAM_SHA [UPSTREAM SHA]" printf >&2 '%s\n' "::error::Upstream has changed, aborting release..." exit 1 fi printf '%s\n' "Verified upstream branch has not changed, continuing with release..." python-semantic-release-9.21.0/.gitignore000066400000000000000000000014571475670435200203570ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python .venv env/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg venv/ # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # PyCharm .idea *.iml # VSCode .vscode/ # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ docs/api/ # PyBuilder target/ .pytest_cache .mypy_cache .python-version *.swp python-semantic-release-9.21.0/.pre-commit-config.yaml000066400000000000000000000050561475670435200226470ustar00rootroot00000000000000--- default_language_version: python: python3 exclude: "^CHANGELOG.md$" repos: # Meta hooks - repo: meta hooks: - id: check-hooks-apply - id: check-useless-excludes # Security & credential scanning/alerting - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.6.0 hooks: - id: debug-statements - id: detect-aws-credentials args: ["--allow-missing-credentials"] - id: detect-private-key - id: check-builtin-literals - id: check-yaml - id: check-toml - id: check-case-conflict - id: end-of-file-fixer - id: trailing-whitespace - id: check-merge-conflict - id: mixed-line-ending - id: check-ast - repo: 
https://github.com/asottile/pyupgrade rev: v3.16.0 hooks: - id: pyupgrade args: ["--py38-plus", "--keep-runtime-typing"] # Linters and validation - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.6.1 hooks: - id: ruff name: ruff (lint) args: - "--fix" - "--exit-non-zero-on-fix" - "--statistics" - "--output-format=full" - id: ruff-format name: ruff (format) - repo: https://github.com/pre-commit/mirrors-mypy rev: "v1.15.0" hooks: - id: mypy additional_dependencies: - "pydantic>=2,<3" - "types-requests" log_file: "mypy.log" files: "^src/.*" pass_filenames: false - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.10.0 hooks: # - id: python-use-type-annotations - id: python-check-blanket-noqa - id: python-check-mock-methods - id: python-no-eval - id: python-no-log-warn - id: rst-backticks - id: rst-directive-colons - id: rst-inline-touching-normal - repo: https://github.com/jendrikseipp/vulture rev: "v2.11" hooks: - id: vulture args: - --min-confidence - "100" - --sort-by-size - "semantic_release" - "tests" - repo: https://github.com/pycqa/bandit rev: 1.7.8 hooks: - id: bandit args: - "-c" - "pyproject.toml" - "--quiet" - "src/" # Needed if using pyproject.toml for config additional_dependencies: ["bandit[toml]"] pass_filenames: false # GHA linting - repo: https://github.com/python-jsonschema/check-jsonschema rev: "0.28.0" hooks: - id: check-github-workflows - id: check-readthedocs - id: check-dependabot python-semantic-release-9.21.0/.readthedocs.yml000066400000000000000000000003561475670435200214520ustar00rootroot00000000000000--- version: 2 sphinx: configuration: docs/conf.py builder: html formats: - htmlzip python: install: - method: pip path: . extra_requirements: - docs build: os: "ubuntu-22.04" tools: python: "3" python-semantic-release-9.21.0/AUTHORS.rst000066400000000000000000000003461475670435200202420ustar00rootroot00000000000000Contributors ------------ |contributors| .. 
|contributors| image:: https://contributors-img.web.app/image?repo=relekang/python-semantic-release :target: https://github.com/relekang/python-semantic-release/graphs/contributors python-semantic-release-9.21.0/CHANGELOG.rst000066400000000000000000006061071475670435200204130ustar00rootroot00000000000000.. _changelog: ========= CHANGELOG ========= .. _changelog-v9.21.0: v9.21.0 (2025-02-23) ==================== ✨ Features ----------- * Add package name variant, ``python-semantic-release``, project script, closes `#1195`_ (`PR#1199`_, `1ac97bc`_) 📖 Documentation ---------------- * **github-actions**: Update example workflow to handle rapid merges (`PR#1200`_, `1a4116a`_) .. _#1195: https://github.com/python-semantic-release/python-semantic-release/issues/1195 .. _1a4116a: https://github.com/python-semantic-release/python-semantic-release/commit/1a4116af4b999144998cf94cf84c9c23ff2e352f .. _1ac97bc: https://github.com/python-semantic-release/python-semantic-release/commit/1ac97bc74c69ce61cec98242c19bf8adc1d37fb9 .. _PR#1199: https://github.com/python-semantic-release/python-semantic-release/pull/1199 .. _PR#1200: https://github.com/python-semantic-release/python-semantic-release/pull/1200 .. _changelog-v9.20.0: v9.20.0 (2025-02-17) ==================== ✨ Features ----------- * **cmd-version**: Enable stamping of tag formatted versions into files, closes `#846`_ (`PR#1190`_, `8906d8e`_) * **cmd-version**: Extend ``version_variables`` to stamp versions with ``@`` symbol separator, closes `#1156`_ (`PR#1185`_, `23f69b6`_) 📖 Documentation ---------------- * **configuration**: Add usage information for tag format version stamping (`PR#1190`_, `8906d8e`_) * **configuration**: Clarify ``version_variables`` config description & ``@`` separator usage (`PR#1185`_, `23f69b6`_) ⚙️ Build System ---------------- * **deps**: Add ``deprecated~=1.2`` for deprecation notices & sphinx documentation (`PR#1190`_, `8906d8e`_) .. 
_#1156: https://github.com/python-semantic-release/python-semantic-release/issues/1156 .. _#846: https://github.com/python-semantic-release/python-semantic-release/issues/846 .. _23f69b6: https://github.com/python-semantic-release/python-semantic-release/commit/23f69b6ac206d111b1e566367f9b2f033df5c87a .. _8906d8e: https://github.com/python-semantic-release/python-semantic-release/commit/8906d8e70467af1489d797ec8cb09b1f95e5d409 .. _PR#1185: https://github.com/python-semantic-release/python-semantic-release/pull/1185 .. _PR#1190: https://github.com/python-semantic-release/python-semantic-release/pull/1190 .. _changelog-v9.19.1: v9.19.1 (2025-02-11) ==================== 🪲 Bug Fixes ------------ * **changelog**: Standardize heading format for across all version sections (`PR#1182`_, `81f9e80`_) * **changelog-md**: Standardize heading format for extra release information (`PR#1182`_, `81f9e80`_) * **changelog-rst**: Standardize heading format for extra release information (`PR#1182`_, `81f9e80`_) * **config**: Handle invalid ``commit_parser`` type gracefully (`PR#1180`_, `903c8ba`_) * **release-notes**: Standardize heading format for extra release information (`PR#1182`_, `81f9e80`_) 📖 Documentation ---------------- * Fix spelling errors & inaccurate descriptions (`55d4a05`_) * **automatic-releases**: Declutter the table of contents for automatic release guides (`e8343ee`_) * **commit-parsing**: Update reference to section name of additional release info (`PR#1182`_, `81f9e80`_) .. _55d4a05: https://github.com/python-semantic-release/python-semantic-release/commit/55d4a05ff56321cf9874f8f302fbe7e5163ad4f7 .. _81f9e80: https://github.com/python-semantic-release/python-semantic-release/commit/81f9e80c3df185ef5e553e024b903ce153e14304 .. _903c8ba: https://github.com/python-semantic-release/python-semantic-release/commit/903c8ba68d797f7cd9e5025c9a3a3ad471c805ae .. 
_e8343ee: https://github.com/python-semantic-release/python-semantic-release/commit/e8343eeb38d3b4e18953ac0f97538df396d22b76 .. _PR#1180: https://github.com/python-semantic-release/python-semantic-release/pull/1180 .. _PR#1182: https://github.com/python-semantic-release/python-semantic-release/pull/1182 .. _changelog-v9.19.0: v9.19.0 (2025-02-10) ==================== ✨ Features ----------- * **parser-conventional**: Add official ``conventional-commits`` parser (`PR#1177`_, `27ddf84`_) 📖 Documentation ---------------- * Update references to Angular parser to Conventional Commit Parser (`PR#1177`_, `27ddf84`_) 💡 ADDITIONAL RELEASE INFORMATION --------------------------------- * **parser-conventional**: The 'angular' commit parser has been renamed to 'conventional' to match the official conventional-commits standard for which the 'angular' parser has evolved into. Please update your configurations to specify 'conventional' as the 'commit_parser' value in place of 'angular'. The 'angular' type will be removed in v11. .. _27ddf84: https://github.com/python-semantic-release/python-semantic-release/commit/27ddf840f8c812361c60bac9cf0b110d401f33d6 .. _PR#1177: https://github.com/python-semantic-release/python-semantic-release/pull/1177 .. _changelog-v9.18.1: v9.18.1 (2025-02-08) ==================== 🪲 Bug Fixes ------------ * **config**: Refactors default token resolution to prevent pre-mature insecure URL error, closes `#1074`_, `#1169`_ (`PR#1173`_, `37db258`_) .. _#1074: https://github.com/python-semantic-release/python-semantic-release/issues/1074 .. _#1169: https://github.com/python-semantic-release/python-semantic-release/issues/1169 .. _37db258: https://github.com/python-semantic-release/python-semantic-release/commit/37db2581620ad02e66716a4b3b365aa28abe65f8 .. _PR#1173: https://github.com/python-semantic-release/python-semantic-release/pull/1173 .. 
_changelog-v9.18.0: v9.18.0 (2025-02-06) ==================== ✨ Features ----------- * Add ``create_release_url`` & ``format_w_official_vcs_name`` filters (`PR#1161`_, `f853cf0`_) * **changelog**: Add ``create_pypi_url`` filter to jinja template render context (`PR#1160`_, `45d49c3`_) * **changelog**: Add additional release info to changeling from commit ``NOTICE``'s (`PR#1166`_, `834ce32`_) * **changelog-md**: Add additional release info section to default markdown template, closes `#223`_ (`PR#1166`_, `834ce32`_) * **changelog-rst**: Add additional release info section to default ReStructuredText template, closes `#223`_ (`PR#1166`_, `834ce32`_) * **commit-parser**: Enable parsers to identify additional release notices from commit msgs (`PR#1166`_, `834ce32`_) * **parser-angular**: Add a ``ignore_merge_commits`` option to discard parsing merge commits (`PR#1164`_, `463e43b`_) * **parser-angular**: Add functionality to parse out ``NOTICE:`` prefixed statements in commits, closes `#223`_ (`PR#1166`_, `834ce32`_) * **parser-emoji**: Add a ``ignore_merge_commits`` option to discard parsing merge commits (`PR#1164`_, `463e43b`_) * **parser-emoji**: Add functionality to parse out ``NOTICE:`` prefixed statements in commits, closes `#223`_ (`PR#1166`_, `834ce32`_) * **parsers**: Add option ``ignore_merge_commits`` to discard parsing merge commits (`PR#1164`_, `463e43b`_) * **release-notes**: Add license information to default release notes template, closes `#228`_ (`PR#1167`_, `41172c1`_) * **vcs-bitbucket**: Add ``format_w_official_vcs_name`` filter function (`PR#1161`_, `f853cf0`_) * **vcs-gitea**: Add ``create_release_url`` & ``format_w_official_vcs_name`` filter functions (`PR#1161`_, `f853cf0`_) * **vcs-github**: Add ``create_release_url`` & ``format_w_official_vcs_name`` filter functions (`PR#1161`_, `f853cf0`_) * **vcs-gitlab**: Add ``create_release_url`` & ``format_w_official_vcs_name`` filter functions (`PR#1161`_, `f853cf0`_) 🪲 Bug Fixes ------------ * Refactor 
parsing compatibility function to support older custom parsers (`PR#1165`_, `cf340c5`_) * **changelog**: Fix parsing compatibility w/ custom parsers, closes `#1162`_ (`PR#1165`_, `cf340c5`_) * **changelog-templates**: Adjust default templates to avoid empty version sections (`PR#1164`_, `463e43b`_) * **parser-angular**: Adjust parser to prevent empty message extractions (`PR#1166`_, `834ce32`_) * **parser-emoji**: Adjust parser to prevent empty message extractions (`PR#1166`_, `834ce32`_) * **version**: Fix parsing compatibility w/ custom parsers, closes `#1162`_ (`PR#1165`_, `cf340c5`_) 📖 Documentation ---------------- * **changelog**: Add formatted changelog into hosted documentation (`PR#1155`_, `2f18a6d`_) * **changelog-templates**: Add description for new ``create_pypi_url`` filter function (`PR#1160`_, `45d49c3`_) * **changelog-templates**: Add details about license specification in the release notes (`PR#1167`_, `41172c1`_) * **changelog-templates**: Define ``create_release_url`` & ``format_w_official_vcs_name`` filters (`PR#1161`_, `f853cf0`_) * **changelog-templates**: Document special separate sections of commit descriptions (`ebb4c67`_) * **commit-parsing**: Document new release notice footer detection feature of built-in parsers (`cd14e92`_) .. _#1162: https://github.com/python-semantic-release/python-semantic-release/issues/1162 .. _#223: https://github.com/python-semantic-release/python-semantic-release/issues/223 .. _#228: https://github.com/python-semantic-release/python-semantic-release/issues/228 .. _2f18a6d: https://github.com/python-semantic-release/python-semantic-release/commit/2f18a6debfa6ef3afcc5611a3e09262998f2d4bf .. _41172c1: https://github.com/python-semantic-release/python-semantic-release/commit/41172c1272a402e94e3c68571d013cbdcb5b9023 .. _45d49c3: https://github.com/python-semantic-release/python-semantic-release/commit/45d49c3da75a7f08c86fc9bab5d232a9b37d9e72 .. 
_463e43b: https://github.com/python-semantic-release/python-semantic-release/commit/463e43b897ee80dfaf7ce9d88d22ea8e652bcf55 .. _834ce32: https://github.com/python-semantic-release/python-semantic-release/commit/834ce323007c58229abf115ef2016a348de9ee66 .. _cd14e92: https://github.com/python-semantic-release/python-semantic-release/commit/cd14e9209d4e54f0876e737d1f802dded294a48c .. _cf340c5: https://github.com/python-semantic-release/python-semantic-release/commit/cf340c5256dea58aedad71a6bdf50b17eee53d2f .. _ebb4c67: https://github.com/python-semantic-release/python-semantic-release/commit/ebb4c67d46b86fdf79e32edf744a2ec2b09d6a93 .. _f853cf0: https://github.com/python-semantic-release/python-semantic-release/commit/f853cf059b3323d7888b06fde09142184e7964e8 .. _PR#1155: https://github.com/python-semantic-release/python-semantic-release/pull/1155 .. _PR#1160: https://github.com/python-semantic-release/python-semantic-release/pull/1160 .. _PR#1161: https://github.com/python-semantic-release/python-semantic-release/pull/1161 .. _PR#1164: https://github.com/python-semantic-release/python-semantic-release/pull/1164 .. _PR#1165: https://github.com/python-semantic-release/python-semantic-release/pull/1165 .. _PR#1166: https://github.com/python-semantic-release/python-semantic-release/pull/1166 .. _PR#1167: https://github.com/python-semantic-release/python-semantic-release/pull/1167 .. 
_changelog-v9.17.0: v9.17.0 (2025-01-26) ==================== ✨ Features ----------- * **changelog**: Add ``sort_numerically`` filter function to template environment (`PR#1146`_, `7792388`_) * **changelog**: Parse squashed commits individually (`PR#1112`_, `cf785ca`_) * **config**: Extend support of remote urls aliased using git ``insteadOf`` configurations, closes `#1150`_ (`PR#1151`_, `4045037`_) * **parsers**: Parse squashed commits individually (`PR#1112`_, `cf785ca`_) * **parser-angular**: Apply PR/MR numbers to all parsed commits from a squash merge (`PR#1112`_, `cf785ca`_) * **parser-angular**: Upgrade angular parser to parse squashed commits individually, closes `#1085`_ (`PR#1112`_, `cf785ca`_) * **parser-emoji**: Add functionality to interpret scopes from gitmoji commit messages (`PR#1112`_, `cf785ca`_) * **parser-emoji**: Upgrade emoji parser to parse squashed commits individually (`PR#1112`_, `cf785ca`_) * **version**: Parse squashed commits individually (`PR#1112`_, `cf785ca`_) 🪲 Bug Fixes ------------ * **github-action**: Disable writing python bytecode in action execution (`PR#1152`_, `315ae21`_) ⚡ Performance Improvements --------------------------- * **logging**: Remove irrelevant debug logging statements (`PR#1147`_, `f1ef4ec`_) 📖 Documentation ---------------- * **changelog-templates**: Add description for new ``sort_numerically`` filter function (`PR#1146`_, `7792388`_) * **commit-parsing**: Add description for squash commit evaluation option of default parsers (`PR#1112`_, `cf785ca`_) * **configuration**: Update the ``commit_parser_options`` setting description (`PR#1112`_, `cf785ca`_) .. _#1085: https://github.com/python-semantic-release/python-semantic-release/issues/1085 .. _#1150: https://github.com/python-semantic-release/python-semantic-release/issues/1150 .. _315ae21: https://github.com/python-semantic-release/python-semantic-release/commit/315ae2176e211b00b13374560d81e127a3065d1a .. 
_4045037: https://github.com/python-semantic-release/python-semantic-release/commit/40450375c7951dafddb09bef8001db7180d95f3a .. _7792388: https://github.com/python-semantic-release/python-semantic-release/commit/77923885c585171e8888aacde989837ecbabf3fc .. _cf785ca: https://github.com/python-semantic-release/python-semantic-release/commit/cf785ca79a49eb4ee95c148e0ae6a19e230e915c .. _f1ef4ec: https://github.com/python-semantic-release/python-semantic-release/commit/f1ef4ecf5f22684a870b958f87d1ca2650e612db .. _PR#1112: https://github.com/python-semantic-release/python-semantic-release/pull/1112 .. _PR#1146: https://github.com/python-semantic-release/python-semantic-release/pull/1146 .. _PR#1147: https://github.com/python-semantic-release/python-semantic-release/pull/1147 .. _PR#1151: https://github.com/python-semantic-release/python-semantic-release/pull/1151 .. _PR#1152: https://github.com/python-semantic-release/python-semantic-release/pull/1152 .. _changelog-v9.16.1: v9.16.1 (2025-01-12) ==================== 🪲 Bug Fixes ------------ * **parser-custom**: Handle relative parent directory paths to module file better (`PR#1142`_, `c4056fc`_) 📖 Documentation ---------------- * **github-actions**: Update PSR versions in github workflow examples (`PR#1140`_, `9bdd626`_) .. _9bdd626: https://github.com/python-semantic-release/python-semantic-release/commit/9bdd626bf8f8359d35725cebe803931063260cac .. _c4056fc: https://github.com/python-semantic-release/python-semantic-release/commit/c4056fc2e1fb3bddb78728793716ac6fb8522b1a .. _PR#1140: https://github.com/python-semantic-release/python-semantic-release/pull/1140 .. _PR#1142: https://github.com/python-semantic-release/python-semantic-release/pull/1142 .. 
_changelog-v9.16.0: v9.16.0 (2025-01-12) ==================== ✨ Features ----------- * **config**: Expand dynamic parser import to handle a filepath to module (`PR#1135`_, `0418fd8`_) 🪲 Bug Fixes ------------ * **changelog**: Fixes PSR release commit exclusions for customized commit messages (`PR#1139`_, `f9a2078`_) * **cmd-version**: Fixes ``--print-tag`` result to match configured tag format (`PR#1134`_, `a990aa7`_) * **cmd-version**: Fixes tag format on default version when force bump for initial release, closes `#1137`_ (`PR#1138`_, `007fd00`_) * **config-changelog**: Validate ``changelog.exclude_commit_patterns`` on config load (`PR#1139`_, `f9a2078`_) 📖 Documentation ---------------- * **commit-parsing**: Add the new custom parser import spec description for direct path imports, closes `#687`_ (`PR#1135`_, `0418fd8`_) * **configuration**: Adjust ``commit_parser`` option definition for direct path imports (`PR#1135`_, `0418fd8`_) .. _#687: https://github.com/python-semantic-release/python-semantic-release/issues/687 .. _#1137: https://github.com/python-semantic-release/python-semantic-release/issues/1137 .. _007fd00: https://github.com/python-semantic-release/python-semantic-release/commit/007fd00a3945ed211ece4baab0b79ad93dc018f5 .. _0418fd8: https://github.com/python-semantic-release/python-semantic-release/commit/0418fd8d27aac14925aafa50912e751e3aeff2f7 .. _a990aa7: https://github.com/python-semantic-release/python-semantic-release/commit/a990aa7ab0a9d52d295c04d54d20e9c9f2db2ca5 .. _f9a2078: https://github.com/python-semantic-release/python-semantic-release/commit/f9a20787437d0f26074fe2121bf0a29576a96df0 .. _PR#1134: https://github.com/python-semantic-release/python-semantic-release/pull/1134 .. _PR#1135: https://github.com/python-semantic-release/python-semantic-release/pull/1135 .. _PR#1138: https://github.com/python-semantic-release/python-semantic-release/pull/1138 .. 
_PR#1139: https://github.com/python-semantic-release/python-semantic-release/pull/1139 .. _changelog-v9.15.2: v9.15.2 (2024-12-16) ==================== 🪲 Bug Fixes ------------ * **changelog**: Ensures user rendered files are trimmed to end with a single newline (`PR#1118`_, `6dfbbb0`_) * **cli**: Add error message of how to gather full error output (`PR#1116`_, `ba85532`_) * **cmd-version**: Enable maintenance prereleases (`PR#864`_, `b88108e`_) * **cmd-version**: Fix handling of multiple prerelease token variants & git flow merges (`PR#1120`_, `8784b9a`_) * **cmd-version**: Fix version determination algorithm to capture commits across merged branches (`PR#1120`_, `8784b9a`_) * **cmd-version**: Forces tag timestamp to be same time as release commit (`PR#1117`_, `7898b11`_) * **cmd-version**: Handle multiple prerelease token variants properly, closes `#789`_ (`PR#1120`_, `8784b9a`_) * **config**: Ensure default config loads on network mounted windows environments, closes `#1123`_ (`PR#1124`_, `a64cbc9`_) * **version**: Remove some excessive log msgs from debug to silly level (`PR#1120`_, `8784b9a`_) * **version-bump**: Increment based on current commit's history only, closes `#861`_ (`PR#864`_, `b88108e`_) ⚡ Performance Improvements --------------------------- * **cmd-version**: Refactor version determination algorithm for accuracy & speed (`PR#1120`_, `8784b9a`_) .. _#789: https://github.com/python-semantic-release/python-semantic-release/issues/789 .. _#861: https://github.com/python-semantic-release/python-semantic-release/issues/861 .. _#1123: https://github.com/python-semantic-release/python-semantic-release/issues/1123 .. _6dfbbb0: https://github.com/python-semantic-release/python-semantic-release/commit/6dfbbb0371aef6b125cbcbf89b80dc343ed97360 .. _7898b11: https://github.com/python-semantic-release/python-semantic-release/commit/7898b1185fc1ad10e96bf3f5e48d9473b45d2b51 .. 
_8784b9a: https://github.com/python-semantic-release/python-semantic-release/commit/8784b9ad4bc59384f855b5af8f1b8fb294397595 .. _a64cbc9: https://github.com/python-semantic-release/python-semantic-release/commit/a64cbc96c110e32f1ec5d1a7b61e950472491b87 .. _b88108e: https://github.com/python-semantic-release/python-semantic-release/commit/b88108e189e1894e36ae4fdf8ad8a382b5c8c90a .. _ba85532: https://github.com/python-semantic-release/python-semantic-release/commit/ba85532ddd6fcf1a2205f7ce0b88ea5be76cb621 .. _PR#864: https://github.com/python-semantic-release/python-semantic-release/pull/864 .. _PR#1116: https://github.com/python-semantic-release/python-semantic-release/pull/1116 .. _PR#1117: https://github.com/python-semantic-release/python-semantic-release/pull/1117 .. _PR#1118: https://github.com/python-semantic-release/python-semantic-release/pull/1118 .. _PR#1120: https://github.com/python-semantic-release/python-semantic-release/pull/1120 .. _PR#1124: https://github.com/python-semantic-release/python-semantic-release/pull/1124 .. _changelog-v9.15.1: v9.15.1 (2024-12-03) ==================== 🪲 Bug Fixes ------------ * **changelog-md**: Fix commit sort of breaking descriptions section (`75b342e`_) * **parser-angular**: Ensure issues are sorted by numeric value rather than text sorted (`3858add`_) * **parser-emoji**: Ensure issues are sorted by numeric value rather than text sorted (`7b8d2d9`_) .. _3858add: https://github.com/python-semantic-release/python-semantic-release/commit/3858add582fe758dc2ae967d0cd051d43418ecd0 .. _75b342e: https://github.com/python-semantic-release/python-semantic-release/commit/75b342e6259412cb82d8b7663e5ee4536d14f407 .. _7b8d2d9: https://github.com/python-semantic-release/python-semantic-release/commit/7b8d2d92e135ab46d1be477073ccccc8c576f121 .. 
_changelog-v9.15.0: v9.15.0 (2024-12-02) ==================== ✨ Features ----------- * **changelog-md**: Add a breaking changes section to default Markdown template, closes `#244`_ (`PR#1110`_, `4fde30e`_) * **changelog-md**: Alphabetize breaking change descriptions in markdown changelog template (`PR#1110`_, `4fde30e`_) * **changelog-md**: Alphabetize commit summaries & scopes in markdown changelog template (`PR#1111`_, `8327068`_) * **changelog-rst**: Add a breaking changes section to default reStructuredText template, closes `#244`_ (`PR#1110`_, `4fde30e`_) * **changelog-rst**: Alphabetize breaking change descriptions in ReStructuredText template (`PR#1110`_, `4fde30e`_) * **changelog-rst**: Alphabetize commit summaries & scopes in ReStructuredText template (`PR#1111`_, `8327068`_) * **commit-parser**: Enable parsers to flag commit to be ignored for changelog, closes `#778`_ (`PR#1108`_, `0cc668c`_) * **default-changelog**: Add a separate formatted breaking changes section, closes `#244`_ (`PR#1110`_, `4fde30e`_) * **default-changelog**: Alphabetize commit summaries & scopes in change sections (`PR#1111`_, `8327068`_) * **parsers**: Add ``other_allowed_tags`` option for commit parser options (`PR#1109`_, `f90b8dc`_) * **parsers**: Enable parsers to identify linked issues on a commit (`PR#1109`_, `f90b8dc`_) * **parser-angular**: Automatically parse angular issue footers from commit messages (`PR#1109`_, `f90b8dc`_) * **parser-custom**: Enable custom parsers to identify linked issues on a commit (`PR#1109`_, `f90b8dc`_) * **parser-emoji**: Parse issue reference footers from commit messages (`PR#1109`_, `f90b8dc`_) * **release-notes**: Add tag comparison link to release notes when supported (`PR#1107`_, `9073344`_) 🪲 Bug Fixes ------------ * **cmd-version**: Ensure release utilizes a timezone aware datetime (`ca817ed`_) * **default-changelog**: Alphabetically sort commit descriptions in version type sections (`bdaaf5a`_) * **util**: Prevent git footers from being 
collapsed during parse (`PR#1109`_, `f90b8dc`_) 📖 Documentation ---------------- * **api-parsers**: Add option documentation to parser options (`PR#1109`_, `f90b8dc`_) * **changelog-templates**: Update examples using new ``commit.linked_issues`` attribute (`PR#1109`_, `f90b8dc`_) * **commit-parsing**: Improve & expand commit parsing w/ parser descriptions (`PR#1109`_, `f90b8dc`_) .. _#244: https://github.com/python-semantic-release/python-semantic-release/issues/244 .. _#778: https://github.com/python-semantic-release/python-semantic-release/issues/778 .. _0cc668c: https://github.com/python-semantic-release/python-semantic-release/commit/0cc668c36490401dff26bb2c3141f6120a2c47d0 .. _4fde30e: https://github.com/python-semantic-release/python-semantic-release/commit/4fde30e0936ecd186e448f1caf18d9ba377c55ad .. _8327068: https://github.com/python-semantic-release/python-semantic-release/commit/83270683fd02b626ed32179d94fa1e3c7175d113 .. _9073344: https://github.com/python-semantic-release/python-semantic-release/commit/9073344164294360843ef5522e7e4c529985984d .. _bdaaf5a: https://github.com/python-semantic-release/python-semantic-release/commit/bdaaf5a460ca77edc40070ee799430122132dc45 .. _ca817ed: https://github.com/python-semantic-release/python-semantic-release/commit/ca817ed9024cf84b306a047675534cc36dc116b2 .. _f90b8dc: https://github.com/python-semantic-release/python-semantic-release/commit/f90b8dc6ce9f112ef2c98539d155f9de24398301 .. _PR#1107: https://github.com/python-semantic-release/python-semantic-release/pull/1107 .. _PR#1108: https://github.com/python-semantic-release/python-semantic-release/pull/1108 .. _PR#1109: https://github.com/python-semantic-release/python-semantic-release/pull/1109 .. _PR#1110: https://github.com/python-semantic-release/python-semantic-release/pull/1110 .. _PR#1111: https://github.com/python-semantic-release/python-semantic-release/pull/1111 .. 
_changelog-v9.14.0: v9.14.0 (2024-11-11) ==================== ✨ Features ----------- * **changelog**: Add md to rst conversion for markdown inline links (`cb2af1f`_) * **changelog**: Define first release w/o change descriptions for default MD template (`fa89dec`_) * **changelog**: Define first release w/o change descriptions for default RST template (`e30c94b`_) * **changelog**: Prefix scopes on commit descriptions in default template (`PR#1093`_, `560fd2c`_) * **changelog-md**: Add markdown inline link format macro (`c6d8211`_) * **changelog-md**: Prefix scopes on commit descriptions in Markdown changelog template (`PR#1093`_, `560fd2c`_) * **changelog-rst**: Prefix scopes on commit descriptions in ReStructuredText template (`PR#1093`_, `560fd2c`_) * **configuration**: Add ``changelog.default_templates.mask_initial_release`` option (`595a70b`_) * **context**: Add ``mask_initial_release`` setting to changelog context (`6f2ee39`_) * **release-notes**: Define first release w/o change descriptions in default template (`83167a3`_) 🪲 Bug Fixes ------------ * **release-notes**: Override default word-wrap to non-wrap for in default template (`99ab99b`_) 📖 Documentation ---------------- * **changelog-templates**: Document new ``mask_initial_release`` changelog context variable (`f294957`_) * **configuration**: Document new ``mask_initial_release`` option usage & effect (`3cabcdc`_) * **homepage**: Fix reference to new ci workflow for test status badge (`6760069`_) .. _3cabcdc: https://github.com/python-semantic-release/python-semantic-release/commit/3cabcdcd9473e008604e74cc2d304595317e921d .. _560fd2c: https://github.com/python-semantic-release/python-semantic-release/commit/560fd2c0d58c97318377cb83af899a336d24cfcc .. _595a70b: https://github.com/python-semantic-release/python-semantic-release/commit/595a70bcbc8fea1f8ccf6c5069c41c35ec4efb8d .. _6760069: https://github.com/python-semantic-release/python-semantic-release/commit/6760069e7489f50635beb5aedbbeb2cb82b7c584 .. 
_6f2ee39: https://github.com/python-semantic-release/python-semantic-release/commit/6f2ee39414b3cf75c0b67dee4db0146bbc1041bb .. _83167a3: https://github.com/python-semantic-release/python-semantic-release/commit/83167a3dcceb7db16b790e1b0efd5fc75fee8942 .. _99ab99b: https://github.com/python-semantic-release/python-semantic-release/commit/99ab99bb0ba350ca1913a2bde9696f4242278972 .. _c6d8211: https://github.com/python-semantic-release/python-semantic-release/commit/c6d8211c859442df17cb41d2ff19fdb7a81cdb76 .. _cb2af1f: https://github.com/python-semantic-release/python-semantic-release/commit/cb2af1f17cf6c8ae037c6cd8bb8b4d9c019bb47e .. _e30c94b: https://github.com/python-semantic-release/python-semantic-release/commit/e30c94bffe62b42e8dc6ed4fed6260e57b4d532b .. _f294957: https://github.com/python-semantic-release/python-semantic-release/commit/f2949577dfb2dbf9c2ac952c1bbcc4ab84da080b .. _fa89dec: https://github.com/python-semantic-release/python-semantic-release/commit/fa89dec239efbae7544b187f624a998fa9ecc309 .. _PR#1093: https://github.com/python-semantic-release/python-semantic-release/pull/1093 .. 
_changelog-v9.13.0: v9.13.0 (2024-11-10) ==================== ✨ Features ----------- * **changelog**: Add PR/MR url linking to default Markdown changelog, closes `#924`_, `#953`_ (`cd8d131`_) * **changelog**: Add PR/MR url linking to default reStructuredText template, closes `#924`_, `#953`_ (`5f018d6`_) * **parsed-commit**: Add linked merge requests list to the ``ParsedCommit`` object (`9a91062`_) * **parser-angular**: Automatically parse PR/MR numbers from subject lines in commits (`2ac798f`_) * **parser-emoji**: Automatically parse PR/MR numbers from subject lines in commits (`bca9909`_) * **parser-scipy**: Automatically parse PR/MR numbers from subject lines in commits (`2b3f738`_) 🪲 Bug Fixes ------------ * **changelog-rst**: Ignore unknown parsed commit types in default RST changelog (`77609b1`_) * **parser-angular**: Drop the ``breaking`` category but still maintain a major level bump (`f1ffa54`_) * **parsers**: Improve reliability of descriptions after reverse word-wrap (`436374b`_) ⚡ Performance Improvements --------------------------- * **parser-angular**: Simplify commit parsing type pre-calculation (`a86a28c`_) * **parser-emoji**: Increase speed of commit parsing (`2c9c468`_) * **parser-scipy**: Increase speed & decrease complexity of commit parsing (`2b661ed`_) 📖 Documentation ---------------- * **changelog-templates**: Add ``linked_merge_request`` field to examples (`d4376bc`_) * **changelog-templates**: Fix api class reference links (`7a5bdf2`_) * **commit-parsing**: Add ``linked_merge_request`` field to Parsed Commit definition (`ca61889`_) .. _#924: https://github.com/python-semantic-release/python-semantic-release/issues/924 .. _#953: https://github.com/python-semantic-release/python-semantic-release/issues/953 .. _2ac798f: https://github.com/python-semantic-release/python-semantic-release/commit/2ac798f92e0c13c1db668747f7e35a65b99ae7ce .. 
_2b3f738: https://github.com/python-semantic-release/python-semantic-release/commit/2b3f73801f5760bac29acd93db3ffb2bc790cda0 .. _2b661ed: https://github.com/python-semantic-release/python-semantic-release/commit/2b661ed122a6f0357a6b92233ac1351c54c7794e .. _2c9c468: https://github.com/python-semantic-release/python-semantic-release/commit/2c9c4685a66feb35cd78571cf05f76344dd6d66a .. _436374b: https://github.com/python-semantic-release/python-semantic-release/commit/436374b04128d1550467ae97ba90253f1d1b3878 .. _5f018d6: https://github.com/python-semantic-release/python-semantic-release/commit/5f018d630b4c625bdf6d329b27fd966eba75b017 .. _77609b1: https://github.com/python-semantic-release/python-semantic-release/commit/77609b1917a00b106ce254e6f6d5edcd1feebba7 .. _7a5bdf2: https://github.com/python-semantic-release/python-semantic-release/commit/7a5bdf29b3df0f9a1346ea5301d2a7fee953667b .. _9a91062: https://github.com/python-semantic-release/python-semantic-release/commit/9a9106212d6c240e9d3358e139b4c4694eaf9c4b .. _a86a28c: https://github.com/python-semantic-release/python-semantic-release/commit/a86a28c5e26ed766cda71d26b9382c392e377c61 .. _bca9909: https://github.com/python-semantic-release/python-semantic-release/commit/bca9909c1b61fdb1f9ccf823fceb6951cd059820 .. _ca61889: https://github.com/python-semantic-release/python-semantic-release/commit/ca61889d4ac73e9864fbf637fb87ab2d5bc053ea .. _cd8d131: https://github.com/python-semantic-release/python-semantic-release/commit/cd8d1310a4000cc79b529fbbdc58933f4c6373c6 .. _d4376bc: https://github.com/python-semantic-release/python-semantic-release/commit/d4376bc2ae4d3708d501d91211ec3ee3a923e9b5 .. _f1ffa54: https://github.com/python-semantic-release/python-semantic-release/commit/f1ffa5411892de34cdc842fd55c460a24b6685c6 .. 
_changelog-v9.12.2: v9.12.2 (2024-11-07) ==================== 🪲 Bug Fixes ------------ * **bitbucket**: Fix ``pull_request_url`` filter to ignore a PR prefix gracefully (`PR#1089`_, `275ec88`_) * **cli**: Gracefully capture all exceptions unless in very verbose debug mode (`PR#1088`_, `13ca44f`_) * **gitea**: Fix ``issue_url`` filter to ignore an issue prefix gracefully (`PR#1089`_, `275ec88`_) * **gitea**: Fix ``pull_request_url`` filter to ignore a PR prefix gracefully (`PR#1089`_, `275ec88`_) * **github**: Fix ``issue_url`` filter to ignore an issue prefix gracefully (`PR#1089`_, `275ec88`_) * **github**: Fix ``pull_request_url`` filter to ignore a PR prefix gracefully (`PR#1089`_, `275ec88`_) * **gitlab**: Fix ``issue_url`` filter to ignore an issue prefix gracefully (`PR#1089`_, `275ec88`_) * **gitlab**: Fix ``merge_request_url`` filter to ignore a PR prefix gracefully (`PR#1089`_, `275ec88`_) * **hvcs**: Add flexibility to issue & MR/PR url jinja filters (`PR#1089`_, `275ec88`_) 📖 Documentation ---------------- * **changelog-templates**: Update descriptions of issue & MR/PR url jinja filters (`PR#1089`_, `275ec88`_) .. _13ca44f: https://github.com/python-semantic-release/python-semantic-release/commit/13ca44f4434098331f70e6937684679cf1b4106a .. _275ec88: https://github.com/python-semantic-release/python-semantic-release/commit/275ec88e6d1637c47065bb752a60017ceba9876c .. _PR#1088: https://github.com/python-semantic-release/python-semantic-release/pull/1088 .. _PR#1089: https://github.com/python-semantic-release/python-semantic-release/pull/1089 .. 
_changelog-v9.12.1: v9.12.1 (2024-11-06) ==================== 🪲 Bug Fixes ------------ * **changelog**: Fix raw-inline pattern replacement in ``convert_md_to_rst`` filter (`2dc70a6`_) * **cmd-version**: Fix ``--as-prerelease`` when no commit change from last full release (`PR#1076`_, `3b7b772`_) * **release-notes**: Add context variable shorthand ``ctx`` like docs claim & changelog has (`d618d83`_) 📖 Documentation ---------------- * **contributing**: Update local testing instructions (`74f03d4`_) .. _2dc70a6: https://github.com/python-semantic-release/python-semantic-release/commit/2dc70a6106776106b0fba474b0029071317d639f .. _3b7b772: https://github.com/python-semantic-release/python-semantic-release/commit/3b7b77246100cedd8cc8f289395f7641187ffdec .. _74f03d4: https://github.com/python-semantic-release/python-semantic-release/commit/74f03d44684b7b2d84f9f5e471425b02f8bf91c3 .. _d618d83: https://github.com/python-semantic-release/python-semantic-release/commit/d618d83360c4409fc149f70b97c5fe338fa89968 .. _PR#1076: https://github.com/python-semantic-release/python-semantic-release/pull/1076 .. _changelog-v9.12.0: v9.12.0 (2024-10-18) ==================== ✨ Features ----------- * **changelog**: Add ``autofit_text_width`` filter to template environment (`PR#1062`_, `83e4b86`_) 🪲 Bug Fixes ------------ * **changelog**: Ignore commit exclusion when a commit causes a version bump (`e8f886e`_) * **parser-angular**: Change ``Fixes`` commit type heading to ``Bug Fixes`` (`PR#1064`_, `09e3a4d`_) * **parser-emoji**: Enable the default bump level option (`bc27995`_) 📖 Documentation ---------------- * **changelog-templates**: Add definition & usage of ``autofit_text_width`` template filter (`PR#1062`_, `83e4b86`_) * **commit-parsers**: Add deprecation message for the tag parser (`af94540`_) * **configuration**: Add deprecation message for the tag parser (`a83b7e4`_) .. 
_09e3a4d: https://github.com/python-semantic-release/python-semantic-release/commit/09e3a4da6237740de8e9932d742b18d990e9d079 .. _83e4b86: https://github.com/python-semantic-release/python-semantic-release/commit/83e4b86abd4754c2f95ec2e674f04deb74b9a1e6 .. _a83b7e4: https://github.com/python-semantic-release/python-semantic-release/commit/a83b7e43e4eaa99790969a6c85f44e01cde80d0a .. _af94540: https://github.com/python-semantic-release/python-semantic-release/commit/af94540f2b1c63bf8a4dc977d5d0f66176962b64 .. _bc27995: https://github.com/python-semantic-release/python-semantic-release/commit/bc27995255a96b9d6cc743186e7c35098822a7f6 .. _e8f886e: https://github.com/python-semantic-release/python-semantic-release/commit/e8f886ef2abe8ceaea0a24a0112b92a167abd6a9 .. _PR#1062: https://github.com/python-semantic-release/python-semantic-release/pull/1062 .. _PR#1064: https://github.com/python-semantic-release/python-semantic-release/pull/1064 .. _changelog-v9.11.1: v9.11.1 (2024-10-15) ==================== 🪲 Bug Fixes ------------ * **changelog**: Prevent custom template errors when components are in hidden folders (`PR#1060`_, `a7614b0`_) .. _a7614b0: https://github.com/python-semantic-release/python-semantic-release/commit/a7614b0db8ce791e4252209e66f42b5b5275dffd .. _PR#1060: https://github.com/python-semantic-release/python-semantic-release/pull/1060 .. 
_changelog-v9.11.0: v9.11.0 (2024-10-12) ==================== ✨ Features ----------- * **changelog**: Add ``convert_md_to_rst`` filter to changelog environment (`PR#1055`_, `c2e8831`_) * **changelog**: Add default changelog in re-structured text format, closes `#399`_ (`PR#1055`_, `c2e8831`_) * **changelog**: Add default changelog template in reStructuredText format (`PR#1055`_, `c2e8831`_) * **config**: Enable default ``changelog.insertion_flag`` based on output format (`PR#1055`_, `c2e8831`_) * **config**: Enable target changelog filename to trigger RST output format, closes `#399`_ (`PR#1055`_, `c2e8831`_) 🪲 Bug Fixes ------------ * **changelog**: Correct spacing for default markdown template during updates (`PR#1055`_, `c2e8831`_) 📖 Documentation ---------------- * **changelog**: Clarify the ``convert_md_to_rst`` filter added to the template environment (`PR#1055`_, `c2e8831`_) * **changelog**: Increase detail about configuration options of default changelog creation (`PR#1055`_, `c2e8831`_) * **configuration**: Update ``changelog_file`` with deprecation notice of setting relocation (`PR#1055`_, `c2e8831`_) * **configuration**: Update ``output_format`` description for reStructuredText support (`PR#1055`_, `c2e8831`_) * **configuration**: Update details of ``insertion_flag``'s dynamic defaults with rst (`PR#1055`_, `c2e8831`_) .. _#399: https://github.com/python-semantic-release/python-semantic-release/issues/399 .. _c2e8831: https://github.com/python-semantic-release/python-semantic-release/commit/c2e883104d3c11e56f229638e988d8b571f86e34 .. _PR#1055: https://github.com/python-semantic-release/python-semantic-release/pull/1055 .. _changelog-v9.10.1: v9.10.1 (2024-10-10) ==================== 🪲 Bug Fixes ------------ * **config**: Handle branch match regex errors gracefully (`PR#1054`_, `4d12251`_) .. _4d12251: https://github.com/python-semantic-release/python-semantic-release/commit/4d12251c678a38de6b71cac5b9c1390eb9dd8ad6 .. 
_PR#1054: https://github.com/python-semantic-release/python-semantic-release/pull/1054 .. _changelog-v9.10.0: v9.10.0 (2024-10-08) ==================== ✨ Features ----------- * **changelog**: Add ``changelog_insertion_flag`` to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add ``changelog_mode`` to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add ``prev_changelog_file`` to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add ``read_file`` function to changelog template context (`PR#1045`_, `c18c245`_) * **changelog**: Add shorthand ``ctx`` variable to changelog template env (`PR#1045`_, `c18c245`_) * **changelog**: Modify changelog template to support changelog updates, closes `#858`_ (`PR#1045`_, `c18c245`_) * **config**: Add ``changelog.default_templates.output_format`` config option (`PR#1045`_, `c18c245`_) * **config**: Add ``changelog.insertion_flag`` as configuration option (`PR#1045`_, `c18c245`_) * **config**: Add ``changelog.mode`` as configuration option (`PR#1045`_, `c18c245`_) * **github-actions**: Add an action ``build`` directive to toggle the ``--skip-build`` option (`PR#1044`_, `26597e2`_) 🪲 Bug Fixes ------------ * **changelog**: Adjust angular heading names for readability (`PR#1045`_, `c18c245`_) * **changelog**: Ensure changelog templates can handle complex directory includes (`PR#1045`_, `c18c245`_) * **changelog**: Only render user templates when files exist (`PR#1045`_, `c18c245`_) * **config**: Prevent jinja from autoescaping markdown content by default (`PR#1045`_, `c18c245`_) 📖 Documentation ---------------- * **changelog-templates**: Improve detail & describe new ``changelog.mode="update"`` (`PR#1045`_, `c18c245`_) * **commands**: Update definition of the version commands ``--skip-build`` option (`PR#1044`_, `26597e2`_) * **configuration**: Add ``changelog.mode`` and ``changelog.insertion_flag`` config definitions (`PR#1045`_, `c18c245`_) * **configuration**: Define the 
new ``changelog.default_templates.output_format`` option (`PR#1045`_, `c18c245`_) * **configuration**: Mark version of configuration setting introduction (`PR#1045`_, `c18c245`_) * **configuration**: Standardize all true/false to lowercase ensuring toml-compatibility (`PR#1045`_, `c18c245`_) * **configuration**: Update ``changelog.environment.autoescape`` default to ``false`` to match code (`PR#1045`_, `c18c245`_) * **github-actions**: Add description of the ``build`` input directive (`PR#1044`_, `26597e2`_) * **github-actions**: Update primary example with workflow sha controlled pipeline (`14f04df`_) * **homepage**: Update custom changelog reference (`PR#1045`_, `c18c245`_) .. _#722: https://github.com/python-semantic-release/python-semantic-release/issues/722 .. _#858: https://github.com/python-semantic-release/python-semantic-release/issues/858 .. _14f04df: https://github.com/python-semantic-release/python-semantic-release/commit/14f04dffc7366142faecebb162d4449501cbf1fd .. _26597e2: https://github.com/python-semantic-release/python-semantic-release/commit/26597e24a80a37500264aa95a908ba366699099e .. _c18c245: https://github.com/python-semantic-release/python-semantic-release/commit/c18c245df51a9778af09b9dc7a315e3f11cdcda0 .. _PR#1044: https://github.com/python-semantic-release/python-semantic-release/pull/1044 .. _PR#1045: https://github.com/python-semantic-release/python-semantic-release/pull/1045 .. 
_changelog-v9.9.0: v9.9.0 (2024-09-28) =================== ✨ Features ----------- * **github-actions**: Add ``is_prerelease`` output to the version action (`PR#1038`_, `6a5d35d`_) 📖 Documentation ---------------- * **automatic-releases**: Drop extraneous github push configuration (`PR#1011`_, `2135c68`_) * **github-actions**: Add configuration & description of publish action (`PR#1011`_, `2135c68`_) * **github-actions**: Add description of new ``is_prerelease`` output for version action (`PR#1038`_, `6a5d35d`_) * **github-actions**: Clarify & consolidate GitHub Actions usage docs, closes `#907`_ (`PR#1011`_, `2135c68`_) * **github-actions**: Expand descriptions & clarity of actions configs (`PR#1011`_, `2135c68`_) * **github-actions**: Revert removal of namespace prefix from examples (`PR#1011`_, `2135c68`_) * **homepage**: Remove link to old github config & update token scope config (`PR#1011`_, `2135c68`_) .. _#907: https://github.com/python-semantic-release/python-semantic-release/issues/907 .. _2135c68: https://github.com/python-semantic-release/python-semantic-release/commit/2135c68ccbdad94378809902b52fcad546efd5b3 .. _6a5d35d: https://github.com/python-semantic-release/python-semantic-release/commit/6a5d35d0d9124d6a6ee7910711b4154b006b8773 .. _PR#1011: https://github.com/python-semantic-release/python-semantic-release/pull/1011 .. _PR#1038: https://github.com/python-semantic-release/python-semantic-release/pull/1038 .. _changelog-v9.8.9: v9.8.9 (2024-09-27) =================== 🪲 Bug Fixes ------------ * **version-cmd**: Ensure ``version_variables`` do not match partial variable names (`PR#1028`_, `156915c`_) * **version-cmd**: Improve ``version_variables`` flexibility w/ quotes (ie. json, yaml, etc) (`PR#1028`_, `156915c`_) * **version-cmd**: Increase ``version_variable`` flexibility with quotations (ie. 
json, yaml, etc), closes `#601`_, `#706`_, `#962`_, `#1026`_ (`PR#1028`_, `156915c`_) 📖 Documentation ---------------- * Update docstrings to resolve sphinx failures, closes `#1029`_ (`PR#1030`_, `d84efc7`_) * **configuration**: Add clarity to ``version_variables`` usage & limitations (`PR#1028`_, `156915c`_) * **homepage**: Re-structure homepage to be separate from project readme (`PR#1032`_, `2307ed2`_) * **README**: Simplify README to point at official docs (`PR#1032`_, `2307ed2`_) .. _#1026: https://github.com/python-semantic-release/python-semantic-release/issues/1026 .. _#1029: https://github.com/python-semantic-release/python-semantic-release/issues/1029 .. _#601: https://github.com/python-semantic-release/python-semantic-release/issues/601 .. _#706: https://github.com/python-semantic-release/python-semantic-release/issues/706 .. _#962: https://github.com/python-semantic-release/python-semantic-release/issues/962 .. _156915c: https://github.com/python-semantic-release/python-semantic-release/commit/156915c7d759098f65cf9de7c4e980b40b38d5f1 .. _2307ed2: https://github.com/python-semantic-release/python-semantic-release/commit/2307ed29d9990bf1b6821403a4b8db3365ef8bb5 .. _d84efc7: https://github.com/python-semantic-release/python-semantic-release/commit/d84efc7719a8679e6979d513d1c8c60904af7384 .. _PR#1028: https://github.com/python-semantic-release/python-semantic-release/pull/1028 .. _PR#1030: https://github.com/python-semantic-release/python-semantic-release/pull/1030 .. _PR#1032: https://github.com/python-semantic-release/python-semantic-release/pull/1032 .. 
_changelog-v9.8.8: v9.8.8 (2024-09-01) =================== 🪲 Bug Fixes ------------ * **config**: Fix path traversal detection for windows compatibility, closes `#994`_ (`PR#1014`_, `16e6daa`_) 📖 Documentation ---------------- * **configuration**: Update ``build_command`` env table for windows to use all capital vars (`0e8451c`_) * **github-actions**: Update version in examples to latest version (`3c894ea`_) .. _#994: https://github.com/python-semantic-release/python-semantic-release/issues/994 .. _0e8451c: https://github.com/python-semantic-release/python-semantic-release/commit/0e8451cf9003c6a3bdcae6878039d7d9a23d6d5b .. _16e6daa: https://github.com/python-semantic-release/python-semantic-release/commit/16e6daaf851ce1eabf5fbd5aa9fe310a8b0f22b3 .. _3c894ea: https://github.com/python-semantic-release/python-semantic-release/commit/3c894ea8a555d20b454ebf34785e772959bbb4fe .. _PR#1014: https://github.com/python-semantic-release/python-semantic-release/pull/1014 .. _changelog-v9.8.7: v9.8.7 (2024-08-20) =================== 🪲 Bug Fixes ------------ * Provide ``context.history`` global in release notes templates (`PR#1005`_, `5bd91b4`_) * **release-notes**: Fix noop-changelog to print raw release notes (`PR#1005`_, `5bd91b4`_) * **release-notes**: Provide ``context.history`` global in release note templates, closes `#984`_ (`PR#1005`_, `5bd91b4`_) 📖 Documentation ---------------- * Use pinned version for GHA examples (`PR#1004`_, `5fdf761`_) * **changelog**: Clarify description of the default changelog generation process (`399fa65`_) * **configuration**: Clarify ``changelog_file`` vs ``template_dir`` option usage, closes `#983`_ (`a7199c8`_) * **configuration**: Fix build_command_env table rendering (`PR#996`_, `a5eff0b`_) * **github-actions**: Adjust formatting & version warning in code snippets (`PR#1004`_, `5fdf761`_) * **github-actions**: Use pinned version for GHA examples, closes `#1003`_ (`PR#1004`_, `5fdf761`_) .. 
_#1003: https://github.com/python-semantic-release/python-semantic-release/issues/1003 .. _#983: https://github.com/python-semantic-release/python-semantic-release/issues/983 .. _#984: https://github.com/python-semantic-release/python-semantic-release/issues/984 .. _399fa65: https://github.com/python-semantic-release/python-semantic-release/commit/399fa6521d5c6c4397b1d6e9b13ea7945ae92543 .. _5bd91b4: https://github.com/python-semantic-release/python-semantic-release/commit/5bd91b4d7ac33ddf10446f3e66d7d11e0724aeb2 .. _5fdf761: https://github.com/python-semantic-release/python-semantic-release/commit/5fdf7614c036a77ffb051cd30f57d0a63c062c0d .. _a5eff0b: https://github.com/python-semantic-release/python-semantic-release/commit/a5eff0bfe41d2fd5d9ead152a132010b718b7772 .. _a7199c8: https://github.com/python-semantic-release/python-semantic-release/commit/a7199c8cd6041a9de017694302e49b139bbcb034 .. _PR#1004: https://github.com/python-semantic-release/python-semantic-release/pull/1004 .. _PR#1005: https://github.com/python-semantic-release/python-semantic-release/pull/1005 .. _PR#996: https://github.com/python-semantic-release/python-semantic-release/pull/996 .. _changelog-v9.8.6: v9.8.6 (2024-07-20) =================== 🪲 Bug Fixes ------------ * **version-cmd**: Resolve build command execution in powershell (`PR#980`_, `32c8e70`_) 📖 Documentation ---------------- * **configuration**: Correct GHA parameter name for commit email (`PR#981`_, `ce9ffdb`_) .. _32c8e70: https://github.com/python-semantic-release/python-semantic-release/commit/32c8e70915634d8e560b470c3cf38c27cebd7ae0 .. _ce9ffdb: https://github.com/python-semantic-release/python-semantic-release/commit/ce9ffdb82c2358184b288fa18e83a4075f333277 .. _PR#980: https://github.com/python-semantic-release/python-semantic-release/pull/980 .. _PR#981: https://github.com/python-semantic-release/python-semantic-release/pull/981 .. 
_changelog-v9.8.5: v9.8.5 (2024-07-06) =================== 🪲 Bug Fixes ------------ * Enable ``--print-last-released*`` when in detached head or non-release branch (`PR#926`_, `782c0a6`_) * **changelog**: Resolve commit ordering issue when dates are similar (`PR#972`_, `bfda159`_) * **version-cmd**: Drop branch restriction for ``--print-last-released*`` opts, closes `#900`_ (`PR#926`_, `782c0a6`_) ⚡ Performance Improvements --------------------------- * Improve git history processing for changelog generation (`PR#972`_, `bfda159`_) * **changelog**: Improve git history parser changelog generation (`PR#972`_, `bfda159`_) .. _#900: https://github.com/python-semantic-release/python-semantic-release/issues/900 .. _782c0a6: https://github.com/python-semantic-release/python-semantic-release/commit/782c0a6109fb49e168c37f279928c0a4959f8ac6 .. _bfda159: https://github.com/python-semantic-release/python-semantic-release/commit/bfda1593af59e9e728c584dd88d7927fc52c879f .. _PR#926: https://github.com/python-semantic-release/python-semantic-release/pull/926 .. _PR#972: https://github.com/python-semantic-release/python-semantic-release/pull/972 .. _changelog-v9.8.4: v9.8.4 (2024-07-04) =================== 🪲 Bug Fixes ------------ * **changelog-cmd**: Remove usage strings when error occurred, closes `#810`_ (`348a51d`_) * **changelog-cmd**: Render default changelog when user template directory exist but is empty (`bded8de`_) * **config**: Prevent path traversal manipulation of target changelog location (`43e35d0`_) * **config**: Prevent path traversal manipulation of target changelog location (`3eb3dba`_) * **publish-cmd**: Prevent error when provided tag does not exist locally (`16afbbb`_) * **publish-cmd**: Remove usage strings when error occurred, closes `#810`_ (`afbb187`_) * **version-cmd**: Remove usage strings when error occurred, closes `#810`_ (`a7c17c7`_) .. _#810: https://github.com/python-semantic-release/python-semantic-release/issues/810 .. 
_16afbbb: https://github.com/python-semantic-release/python-semantic-release/commit/16afbbb8fbc3a97243e96d7573f4ad2eba09aab9 .. _348a51d: https://github.com/python-semantic-release/python-semantic-release/commit/348a51db8a837d951966aff3789aa0c93d473829 .. _3eb3dba: https://github.com/python-semantic-release/python-semantic-release/commit/3eb3dbafec4223ee463b90e927e551639c69426b .. _43e35d0: https://github.com/python-semantic-release/python-semantic-release/commit/43e35d0972e8a29239d18ed079d1e2013342fcbd .. _a7c17c7: https://github.com/python-semantic-release/python-semantic-release/commit/a7c17c73fd7becb6d0e042e45ff6765605187e2a .. _afbb187: https://github.com/python-semantic-release/python-semantic-release/commit/afbb187d6d405fdf6765082e2a1cecdcd7d357df .. _bded8de: https://github.com/python-semantic-release/python-semantic-release/commit/bded8deae6c92f6dde9774802d9f3716a5cb5705 .. _changelog-v9.8.3: v9.8.3 (2024-06-18) =================== 🪲 Bug Fixes ------------ * **parser**: Strip DOS carriage-returns in commits, closes `#955`_ (`PR#956`_, `0b005df`_) .. _#955: https://github.com/python-semantic-release/python-semantic-release/issues/955 .. _0b005df: https://github.com/python-semantic-release/python-semantic-release/commit/0b005df0a8c7730ee0c71453c9992d7b5d2400a4 .. _PR#956: https://github.com/python-semantic-release/python-semantic-release/pull/956 .. _changelog-v9.8.2: v9.8.2 (2024-06-17) =================== 🪲 Bug Fixes ------------ * **templates**: Suppress extra newlines in default changelog (`PR#954`_, `7b0079b`_) .. _7b0079b: https://github.com/python-semantic-release/python-semantic-release/commit/7b0079bf3e17c0f476bff520b77a571aeac469d0 .. _PR#954: https://github.com/python-semantic-release/python-semantic-release/pull/954 .. 
_changelog-v9.8.1: v9.8.1 (2024-06-05) =================== 🪲 Bug Fixes ------------ * Improve build cmd env on windows (`PR#942`_, `d911fae`_) * **version-cmd**: Pass windows specific env vars to build cmd when on windows (`PR#942`_, `d911fae`_) 📖 Documentation ---------------- * **configuration**: Define windows specific env vars for build cmd (`PR#942`_, `d911fae`_) .. _d911fae: https://github.com/python-semantic-release/python-semantic-release/commit/d911fae993d41a8cb1497fa8b2a7e823576e0f22 .. _PR#942: https://github.com/python-semantic-release/python-semantic-release/pull/942 .. _changelog-v9.8.0: v9.8.0 (2024-05-27) =================== ✨ Features ----------- * Extend gitlab to edit a previous release if one exists (`PR#934`_, `23e02b9`_) * **gha**: Configure ssh signed tags in GitHub Action, closes `#936`_ (`PR#937`_, `dfb76b9`_) * **hvcs-gitlab**: Enable gitlab to edit a previous release if found (`PR#934`_, `23e02b9`_) * **version-cmd**: Add toggle of ``--no-verify`` option to ``git commit`` (`PR#927`_, `1de6f78`_) 🪲 Bug Fixes ------------ * **gitlab**: Adjust release name to mirror other hvcs release names (`PR#934`_, `23e02b9`_) * **hvcs-gitlab**: Add tag message to release creation (`PR#934`_, `23e02b9`_) 📖 Documentation ---------------- * **configuration**: Add ``no_git_verify`` description to the configuration page (`PR#927`_, `1de6f78`_) * **migration-v8**: Update version references in migration instructions (`PR#938`_, `d6ba16a`_) .. _#936: https://github.com/python-semantic-release/python-semantic-release/issues/936 .. _1de6f78: https://github.com/python-semantic-release/python-semantic-release/commit/1de6f7834c6d37a74bc53f91609d40793556b52d .. _23e02b9: https://github.com/python-semantic-release/python-semantic-release/commit/23e02b96dfb2a58f6b4ecf7b7812e4c1bc50573d .. _d6ba16a: https://github.com/python-semantic-release/python-semantic-release/commit/d6ba16aa8e01bae1a022a9b06cd0b9162c51c345 .. 
_dfb76b9: https://github.com/python-semantic-release/python-semantic-release/commit/dfb76b94b859a7f3fa3ad778eec7a86de2874d68 .. _PR#927: https://github.com/python-semantic-release/python-semantic-release/pull/927 .. _PR#934: https://github.com/python-semantic-release/python-semantic-release/pull/934 .. _PR#937: https://github.com/python-semantic-release/python-semantic-release/pull/937 .. _PR#938: https://github.com/python-semantic-release/python-semantic-release/pull/938 .. _changelog-v9.7.3: v9.7.3 (2024-05-15) =================== 🪲 Bug Fixes ------------ * Enabled ``prerelease-token`` parameter in github action (`PR#929`_, `1bb26b0`_) .. _1bb26b0: https://github.com/python-semantic-release/python-semantic-release/commit/1bb26b0762d94efd97c06a3f1b6b10fb76901f6d .. _PR#929: https://github.com/python-semantic-release/python-semantic-release/pull/929 .. _changelog-v9.7.2: v9.7.2 (2024-05-13) =================== 🪲 Bug Fixes ------------ * Enable user configuration of ``build_command`` env vars (`PR#925`_, `6b5b271`_) * **version**: Enable user config of ``build_command`` env variables, closes `#922`_ (`PR#925`_, `6b5b271`_) 📖 Documentation ---------------- * **configuration**: Clarify TOC & alphabetize configuration descriptions (`19add16`_) * **configuration**: Clarify TOC & standardize heading links (`3a41995`_) * **configuration**: Document ``build_command_env`` configuration option (`PR#925`_, `6b5b271`_) * **CONTRIBUTING**: Update build command definition for developers (`PR#921`_, `b573c4d`_) .. _#922: https://github.com/python-semantic-release/python-semantic-release/issues/922 .. _19add16: https://github.com/python-semantic-release/python-semantic-release/commit/19add16dcfdfdb812efafe2d492a933d0856df1d .. _3a41995: https://github.com/python-semantic-release/python-semantic-release/commit/3a4199542d0ea4dbf88fa35e11bec41d0c27dd17 .. _6b5b271: https://github.com/python-semantic-release/python-semantic-release/commit/6b5b271453874b982fbf2827ec1f6be6db1c2cc7 .. 
_b573c4d: https://github.com/python-semantic-release/python-semantic-release/commit/b573c4d4a2c212be9bdee918501bb5e046c6a806 .. _PR#921: https://github.com/python-semantic-release/python-semantic-release/pull/921 .. _PR#925: https://github.com/python-semantic-release/python-semantic-release/pull/925 .. _changelog-v9.7.1: v9.7.1 (2024-05-07) =================== 🪲 Bug Fixes ------------ * **gha**: Fix missing ``git_committer_*`` definition in action, closes `#918`_ (`PR#919`_, `ccef9d8`_) .. _#918: https://github.com/python-semantic-release/python-semantic-release/issues/918 .. _ccef9d8: https://github.com/python-semantic-release/python-semantic-release/commit/ccef9d8521be12c0640369b3c3a80b81a7832662 .. _PR#919: https://github.com/python-semantic-release/python-semantic-release/pull/919 .. _changelog-v9.7.0: v9.7.0 (2024-05-06) =================== ✨ Features ----------- * **version-cmd**: Pass ``NEW_VERSION`` & useful env vars to build command (`ee6b246`_) 🪲 Bug Fixes ------------ * **gha**: Add missing ``tag`` option to GitHub Action definition, closes `#906`_ (`PR#908`_, `6b24288`_) * **gha**: Correct use of ``prerelease`` option for GitHub Action (`PR#914`_, `85e27b7`_) 📖 Documentation ---------------- * **configuration**: Add description of build command available env variables (`c882dc6`_) * **gha**: Update GitHub Actions doc with all available options (`PR#914`_, `85e27b7`_) ⚙️ Build System ---------------- * **deps**: Bump GitHub Action container to use ``python3.12``, closes `#801`_ (`PR#914`_, `85e27b7`_) .. _#801: https://github.com/python-semantic-release/python-semantic-release/issues/801 .. _#906: https://github.com/python-semantic-release/python-semantic-release/issues/906 .. _6b24288: https://github.com/python-semantic-release/python-semantic-release/commit/6b24288a96302cd6982260e46fad128ec4940da9 .. _85e27b7: https://github.com/python-semantic-release/python-semantic-release/commit/85e27b7f486e6b0e6cc9e85e101a97e676bc3d60 .. 
_c882dc6: https://github.com/python-semantic-release/python-semantic-release/commit/c882dc62b860b2aeaa925c21d1524f4ae25ef567 .. _ee6b246: https://github.com/python-semantic-release/python-semantic-release/commit/ee6b246df3bb211ab49c8bce075a4c3f6a68ed77 .. _PR#908: https://github.com/python-semantic-release/python-semantic-release/pull/908 .. _PR#914: https://github.com/python-semantic-release/python-semantic-release/pull/914 .. _changelog-v9.6.0: v9.6.0 (2024-04-29) =================== ✨ Features ----------- * Changelog filters are specialized per vcs type (`PR#890`_, `76ed593`_) * **changelog**: Changelog filters are hvcs focused (`PR#890`_, `76ed593`_) * **changelog-context**: Add flag to jinja env for which hvcs is available (`PR#890`_, `76ed593`_) * **changelog-gitea**: Add issue url filter to changelog context (`PR#890`_, `76ed593`_) * **changelog-github**: Add issue url filter to changelog context (`PR#890`_, `76ed593`_) * **version-cmd**: Add ``--as-prerelease`` option to force the next version to be a prerelease, closes `#639`_ (`PR#647`_, `2acb5ac`_) 🪲 Bug Fixes ------------ * Correct version ``--prerelease`` use & enable ``--as-prerelease`` (`PR#647`_, `2acb5ac`_) * **github**: Correct changelog filter for pull request urls (`PR#890`_, `76ed593`_) * **parser-custom**: Gracefully handle custom parser import errors (`67f6038`_) * **version-cmd**: Correct ``--prerelease`` use, closes `#639`_ (`PR#647`_, `2acb5ac`_) 📖 Documentation ---------------- * **changelog-context**: Explain new hvcs specific context filters (`PR#890`_, `76ed593`_) * **commands**: Update version command options definition about prereleases (`PR#647`_, `2acb5ac`_) .. _#639: https://github.com/python-semantic-release/python-semantic-release/issues/639 .. _2acb5ac: https://github.com/python-semantic-release/python-semantic-release/commit/2acb5ac35ae79d7ae25ca9a03fb5c6a4a68b3673 .. 
_67f6038: https://github.com/python-semantic-release/python-semantic-release/commit/67f60389e3f6e93443ea108c0e1b4d30126b8e06 .. _76ed593: https://github.com/python-semantic-release/python-semantic-release/commit/76ed593ea33c851005994f0d1a6a33cc890fb908 .. _PR#647: https://github.com/python-semantic-release/python-semantic-release/pull/647 .. _PR#890: https://github.com/python-semantic-release/python-semantic-release/pull/890 .. _changelog-v9.5.0: v9.5.0 (2024-04-23) =================== ✨ Features ----------- * Extend support to on-prem GitHub Enterprise Server (`PR#896`_, `4fcb737`_) * **github**: Extend support to on-prem GitHub Enterprise Server, closes `#895`_ (`PR#896`_, `4fcb737`_) .. _#895: https://github.com/python-semantic-release/python-semantic-release/issues/895 .. _4fcb737: https://github.com/python-semantic-release/python-semantic-release/commit/4fcb737958d95d1a3be24db7427e137b46f5075f .. _PR#896: https://github.com/python-semantic-release/python-semantic-release/pull/896 .. 
_changelog-v9.4.2: v9.4.2 (2024-04-14) =================== 🪲 Bug Fixes ------------ * **bitbucket**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **bitbucket**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **config**: Add flag to allow insecure connections (`PR#886`_, `db13438`_) * **gitea**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **gitea**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **github**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **github**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **gitlab**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **gitlab**: Correct url parsing & prevent double url schemes (`PR#676`_, `5cfdb24`_) * **hvcs**: Allow insecure http connections if configured (`PR#886`_, `db13438`_) * **hvcs**: Prevent double protocol scheme urls in changelogs (`PR#676`_, `5cfdb24`_) * **version-cmd**: Handle HTTP exceptions more gracefully (`PR#886`_, `db13438`_) 📖 Documentation ---------------- * **configuration**: Update ``remote`` settings section with missing values, closes `#868`_ (`PR#886`_, `db13438`_) ⚙️ Build System ---------------- * **deps**: Update rich requirement from ~=12.5 to ~=13.0, closes `#888`_ (`PR#877`_, `4a22a8c`_) .. _#868: https://github.com/python-semantic-release/python-semantic-release/issues/868 .. _#888: https://github.com/python-semantic-release/python-semantic-release/issues/888 .. _4a22a8c: https://github.com/python-semantic-release/python-semantic-release/commit/4a22a8c1a69bcf7b1ddd6db56e6883c617a892b3 .. _5cfdb24: https://github.com/python-semantic-release/python-semantic-release/commit/5cfdb248c003a2d2be5fe65fb61d41b0d4c45db5 .. _db13438: https://github.com/python-semantic-release/python-semantic-release/commit/db1343890f7e0644bc8457f995f2bd62087513d3 .. 
_PR#676: https://github.com/python-semantic-release/python-semantic-release/pull/676 .. _PR#877: https://github.com/python-semantic-release/python-semantic-release/pull/877 .. _PR#886: https://github.com/python-semantic-release/python-semantic-release/pull/886 .. _changelog-v9.4.1: v9.4.1 (2024-04-06) =================== 🪲 Bug Fixes ------------ * **gh-actions-output**: Fixed trailing newline to match GITHUB_OUTPUT format (`PR#885`_, `2c7b6ec`_) * **gh-actions-output**: Fixed trailing newline to match GITHUB_OUTPUT format, closes `#884`_ (`PR#885`_, `2c7b6ec`_) .. _#884: https://github.com/python-semantic-release/python-semantic-release/issues/884 .. _2c7b6ec: https://github.com/python-semantic-release/python-semantic-release/commit/2c7b6ec85b6e3182463d7b695ee48e9669a25b3b .. _PR#885: https://github.com/python-semantic-release/python-semantic-release/pull/885 .. _changelog-v9.4.0: v9.4.0 (2024-03-31) =================== ✨ Features ----------- * **gitea**: Derives gitea api domain from base domain when unspecified (`PR#675`_, `2ee3f8a`_) .. _2ee3f8a: https://github.com/python-semantic-release/python-semantic-release/commit/2ee3f8a918d2e5ea9ab64df88f52e62a1f589c38 .. _PR#675: https://github.com/python-semantic-release/python-semantic-release/pull/675 .. _changelog-v9.3.1: v9.3.1 (2024-03-24) =================== 🪲 Bug Fixes ------------ * **algorithm**: Handle merge-base errors gracefully, closes `#724`_ (`4c998b7`_) * **cli-version**: Change implementation to only push the tag we generated, closes `#803`_ (`8a9da4f`_) ⚡ Performance Improvements --------------------------- * **algorithm**: Simplify logs & use lookup when searching for commit & tag match (`3690b95`_) .. _#724: https://github.com/python-semantic-release/python-semantic-release/issues/724 .. _#803: https://github.com/python-semantic-release/python-semantic-release/issues/803 .. _3690b95: https://github.com/python-semantic-release/python-semantic-release/commit/3690b9511de633ab38083de4d2505b6d05853346 .. 
_4c998b7: https://github.com/python-semantic-release/python-semantic-release/commit/4c998b77a3fe5e12783d1ab2d47789a10b83f247 .. _8a9da4f: https://github.com/python-semantic-release/python-semantic-release/commit/8a9da4feb8753e3ab9ea752afa25decd2047675a .. _changelog-v9.3.0: v9.3.0 (2024-03-21) =================== ✨ Features ----------- * **cmd-version**: Changelog available to bundle (`PR#779`_, `37fdb28`_) * **cmd-version**: Create changelog prior to build enabling doc bundling (`PR#779`_, `37fdb28`_) .. _37fdb28: https://github.com/python-semantic-release/python-semantic-release/commit/37fdb28e0eb886d682b5dea4cc83a7c98a099422 .. _PR#779: https://github.com/python-semantic-release/python-semantic-release/pull/779 .. _changelog-v9.2.2: v9.2.2 (2024-03-19) =================== 🪲 Bug Fixes ------------ * **cli**: Enable subcommand help even if config is invalid, closes `#840`_ (`91d221a`_) .. _#840: https://github.com/python-semantic-release/python-semantic-release/issues/840 .. _91d221a: https://github.com/python-semantic-release/python-semantic-release/commit/91d221a01266e5ca6de5c73296b0a90987847494 .. _changelog-v9.2.1: v9.2.1 (2024-03-19) =================== 🪲 Bug Fixes ------------ * **parse-git-url**: Handle urls with url-safe special characters (`27cd93a`_) .. _27cd93a: https://github.com/python-semantic-release/python-semantic-release/commit/27cd93a0a65ee3787ca51be4c91c48f6ddb4269c .. 
_changelog-v9.2.0: v9.2.0 (2024-03-18) =================== ✨ Features ----------- * **version**: Add new version print flags to display the last released version and tag (`814240c`_) * **version-config**: Add option to disable 0.x.x versions (`dedb3b7`_) 🪲 Bug Fixes ------------ * **changelog**: Make sure default templates render ending in 1 newline (`0b4a45e`_) * **changelog-generation**: Fix incorrect release timezone determination (`f802446`_) 📖 Documentation ---------------- * **configuration**: Add description of ``allow-zero-version`` configuration option (`4028f83`_) * **configuration**: Clarify the ``major_on_zero`` configuration option (`f7753cd`_) ⚙️ Build System ---------------- * **deps**: Add click-option-group for grouping exclusive flags (`bd892b8`_) .. _0b4a45e: https://github.com/python-semantic-release/python-semantic-release/commit/0b4a45e3673d0408016dc8e7b0dce98007a763e3 .. _4028f83: https://github.com/python-semantic-release/python-semantic-release/commit/4028f8384a0181c8d58c81ae81cf0b241a02a710 .. _814240c: https://github.com/python-semantic-release/python-semantic-release/commit/814240c7355df95e9be9a6ed31d004b800584bc0 .. _bd892b8: https://github.com/python-semantic-release/python-semantic-release/commit/bd892b89c26df9fccc9335c84e2b3217e3e02a37 .. _dedb3b7: https://github.com/python-semantic-release/python-semantic-release/commit/dedb3b765c8530379af61d3046c3bb9c160d54e5 .. _f7753cd: https://github.com/python-semantic-release/python-semantic-release/commit/f7753cdabd07e276bc001478d605fca9a4b37ec4 .. _f802446: https://github.com/python-semantic-release/python-semantic-release/commit/f802446bd0693c4c9f6bdfdceae8b89c447827d2 .. _changelog-v9.1.1: v9.1.1 (2024-02-25) =================== 🪲 Bug Fixes ------------ * **parse_git_url**: Fix bad url with dash (`1c25b8e`_) .. _1c25b8e: https://github.com/python-semantic-release/python-semantic-release/commit/1c25b8e6f1e43c15ca7d5a59dca0a13767f9bc33 .. 
_changelog-v9.1.0: v9.1.0 (2024-02-14) =================== ✨ Features ----------- * Add bitbucket hvcs (`bbbbfeb`_) 🪲 Bug Fixes ------------ * Remove unofficial environment variables (`a5168e4`_) 📖 Documentation ---------------- * Add bitbucket authentication (`b78a387`_) * Add bitbucket to token table (`56f146d`_) * Fix typo (`b240e12`_) ⚙️ Build System ---------------- * **deps**: Bump minimum required ``tomlkit`` to ``>=0.11.0``, closes `#834`_ (`291aace`_) .. _#834: https://github.com/python-semantic-release/python-semantic-release/issues/834 .. _291aace: https://github.com/python-semantic-release/python-semantic-release/commit/291aacea1d0429a3b27e92b0a20b598f43f6ea6b .. _56f146d: https://github.com/python-semantic-release/python-semantic-release/commit/56f146d9f4c0fc7f2a84ad11b21c8c45e9221782 .. _a5168e4: https://github.com/python-semantic-release/python-semantic-release/commit/a5168e40b9a14dbd022f62964f382b39faf1e0df .. _b240e12: https://github.com/python-semantic-release/python-semantic-release/commit/b240e129b180d45c1d63d464283b7dfbcb641d0c .. _b78a387: https://github.com/python-semantic-release/python-semantic-release/commit/b78a387d8eccbc1a6a424a183254fc576126199c .. _bbbbfeb: https://github.com/python-semantic-release/python-semantic-release/commit/bbbbfebff33dd24b8aed2d894de958d532eac596 .. _changelog-v9.0.3: v9.0.3 (2024-02-08) =================== 🪲 Bug Fixes ------------ * **algorithm**: Correct bfs to not abort on previously visited node (`02df305`_) ⚡ Performance Improvements --------------------------- * **algorithm**: Refactor bfs search to use queue rather than recursion (`8b742d3`_) .. _02df305: https://github.com/python-semantic-release/python-semantic-release/commit/02df305db43abfc3a1f160a4a52cc2afae5d854f .. _8b742d3: https://github.com/python-semantic-release/python-semantic-release/commit/8b742d3db6652981a7b5f773a74b0534edc1fc15 .. 
_changelog-v9.0.2: v9.0.2 (2024-02-08) =================== 🪲 Bug Fixes ------------ * **util**: Properly parse windows line-endings in commit messages, closes `#820`_ (`70193ba`_) 📖 Documentation ---------------- * Remove duplicate note in configuration.rst (`PR#807`_, `fb6f243`_) .. _#820: https://github.com/python-semantic-release/python-semantic-release/issues/820 .. _70193ba: https://github.com/python-semantic-release/python-semantic-release/commit/70193ba117c1a6d3690aed685fee8a734ba174e5 .. _fb6f243: https://github.com/python-semantic-release/python-semantic-release/commit/fb6f243a141642c02469f1080180ecaf4f3cec66 .. _PR#807: https://github.com/python-semantic-release/python-semantic-release/pull/807 .. _changelog-v9.0.1: v9.0.1 (2024-02-06) =================== 🪲 Bug Fixes ------------ * **config**: Set commit parser opt defaults based on parser choice (`PR#782`_, `9c594fb`_) .. _9c594fb: https://github.com/python-semantic-release/python-semantic-release/commit/9c594fb6efac7e4df2b0bfbd749777d3126d03d7 .. _PR#782: https://github.com/python-semantic-release/python-semantic-release/pull/782 .. _changelog-v9.0.0: v9.0.0 (2024-02-06) =================== ♻️ Refactoring --------------- * Drop support for Python 3.7 (`PR#828`_, `ad086f5`_) 💥 BREAKING CHANGES -------------------- * Removed Python 3.7 specific control flows and made more modern implementations the default control flow without a bypass or workaround. Will break on Python 3.7 now. If you require Python 3.7, you should lock your major version at v8. Since we only have enough manpower to maintain the latest major release, unfortunately there will not be any more updates to v8. * We decided to remove support for Python 3.7 because it has been officially deprecated by the Python Foundation over a year ago and our codebase is starting to have limitations and custom implementations just to maintain support for 3.7. .. 
_ad086f5: https://github.com/python-semantic-release/python-semantic-release/commit/ad086f5993ae4741d6e20fee618d1bce8df394fb .. _PR#828: https://github.com/python-semantic-release/python-semantic-release/pull/828 .. _changelog-v8.7.2: v8.7.2 (2024-01-03) =================== 🪲 Bug Fixes ------------ * **lint**: Correct linter errors (`c9556b0`_) .. _c9556b0: https://github.com/python-semantic-release/python-semantic-release/commit/c9556b0ca6df6a61e9ce909d18bc5be8b6154bf8 .. _changelog-v8.7.1: v8.7.1 (2024-01-03) =================== 🪲 Bug Fixes ------------ * **cli-generate-config**: Ensure configuration types are always toml parsable (`PR#785`_, `758e649`_) 📖 Documentation ---------------- * Add note on default envvar behavior (`PR#780`_, `0b07cae`_) * **configuration**: Change defaults definition of token default to table (`PR#786`_, `df1df0d`_) * **contributing**: Add docs-build, testing conf, & build instructions (`PR#787`_, `011b072`_) .. _011b072: https://github.com/python-semantic-release/python-semantic-release/commit/011b0729cba3045b4e7291fd970cb17aad7bae60 .. _0b07cae: https://github.com/python-semantic-release/python-semantic-release/commit/0b07cae71915c5c82d7784898b44359249542a64 .. _758e649: https://github.com/python-semantic-release/python-semantic-release/commit/758e64975fe46b961809f35977574729b7c44271 .. _df1df0d: https://github.com/python-semantic-release/python-semantic-release/commit/df1df0de8bc655cbf8f86ae52aff10efdc66e6d2 .. _PR#780: https://github.com/python-semantic-release/python-semantic-release/pull/780 .. _PR#785: https://github.com/python-semantic-release/python-semantic-release/pull/785 .. _PR#786: https://github.com/python-semantic-release/python-semantic-release/pull/786 .. _PR#787: https://github.com/python-semantic-release/python-semantic-release/pull/787 .. _changelog-v8.7.0: v8.7.0 (2023-12-22) =================== ✨ Features ----------- * **config**: Enable default environment token per hvcs (`PR#774`_, `26528eb`_) .. 
_26528eb: https://github.com/python-semantic-release/python-semantic-release/commit/26528eb8794d00dfe985812269702fbc4c4ec788 .. _PR#774: https://github.com/python-semantic-release/python-semantic-release/pull/774 .. _changelog-v8.6.0: v8.6.0 (2023-12-22) =================== ✨ Features ----------- * **utils**: Expand parsable valid git remote url formats (`PR#771`_, `cf75f23`_) 📖 Documentation ---------------- * Minor correction to commit-parsing documentation (`PR#777`_, `245e878`_) .. _245e878: https://github.com/python-semantic-release/python-semantic-release/commit/245e878f02d5cafec6baf0493c921c1e396b56e8 .. _cf75f23: https://github.com/python-semantic-release/python-semantic-release/commit/cf75f237360488ebb0088e5b8aae626e97d9cbdd .. _PR#771: https://github.com/python-semantic-release/python-semantic-release/pull/771 .. _PR#777: https://github.com/python-semantic-release/python-semantic-release/pull/777 .. _changelog-v8.5.2: v8.5.2 (2023-12-19) =================== 🪲 Bug Fixes ------------ * **cli**: Gracefully output configuration validation errors (`PR#772`_, `e8c9d51`_) .. _e8c9d51: https://github.com/python-semantic-release/python-semantic-release/commit/e8c9d516c37466a5dce75a73766d5be0f9e74627 .. _PR#772: https://github.com/python-semantic-release/python-semantic-release/pull/772 .. _changelog-v8.5.1: v8.5.1 (2023-12-12) =================== 🪲 Bug Fixes ------------ * **cmd-version**: Handle committing of git-ignored file gracefully (`PR#764`_, `ea89fa7`_) * **config**: Cleanly handle repository in detached HEAD state (`PR#765`_, `ac4f9aa`_) * **config**: Gracefully fail when repo is in a detached HEAD state (`PR#765`_, `ac4f9aa`_) * **version**: Only commit non git-ignored files during version commit (`PR#764`_, `ea89fa7`_) 📖 Documentation ---------------- * **configuration**: Adjust wording and improve clarity (`PR#766`_, `6b2fc8c`_) * **configuration**: Fix typo in text (`PR#766`_, `6b2fc8c`_) .. 
_6b2fc8c: https://github.com/python-semantic-release/python-semantic-release/commit/6b2fc8c156e122ee1b43fdb513b2dc3b8fd76724 .. _ac4f9aa: https://github.com/python-semantic-release/python-semantic-release/commit/ac4f9aacb72c99f2479ae33369822faad011a824 .. _ea89fa7: https://github.com/python-semantic-release/python-semantic-release/commit/ea89fa72885e15da91687172355426a22c152513 .. _PR#764: https://github.com/python-semantic-release/python-semantic-release/pull/764 .. _PR#765: https://github.com/python-semantic-release/python-semantic-release/pull/765 .. _PR#766: https://github.com/python-semantic-release/python-semantic-release/pull/766 .. _changelog-v8.5.0: v8.5.0 (2023-12-07) =================== ✨ Features ----------- * Allow template directories to contain a '.' at the top-level (`PR#762`_, `07b232a`_) .. _07b232a: https://github.com/python-semantic-release/python-semantic-release/commit/07b232a3b34be0b28c6af08aea4852acb1b9bd56 .. _PR#762: https://github.com/python-semantic-release/python-semantic-release/pull/762 .. _changelog-v8.4.0: v8.4.0 (2023-12-07) =================== ✨ Features ----------- * **cmd-version**: Add ``--tag/--no-tag`` option to version command (`PR#752`_, `de6b9ad`_) * **version**: Add ``--no-tag`` option to turn off tag creation (`PR#752`_, `de6b9ad`_) 🪲 Bug Fixes ------------ * **version**: Separate push tags from commit push when not committing changes (`PR#752`_, `de6b9ad`_) 📖 Documentation ---------------- * **commands**: Update ``version`` subcommand options (`PR#752`_, `de6b9ad`_) * **migration**: Fix comments about publish command (`PR#747`_, `90380d7`_) .. _90380d7: https://github.com/python-semantic-release/python-semantic-release/commit/90380d797a734dcca5040afc5fa00e3e01f64152 .. _de6b9ad: https://github.com/python-semantic-release/python-semantic-release/commit/de6b9ad921e697b5ea2bb2ea8f180893cecca920 .. _PR#747: https://github.com/python-semantic-release/python-semantic-release/pull/747 .. 
_PR#752: https://github.com/python-semantic-release/python-semantic-release/pull/752 .. _changelog-v8.3.0: v8.3.0 (2023-10-23) =================== ✨ Features ----------- * **action**: Use composite action for semantic release (`PR#692`_, `4648d87`_) .. _4648d87: https://github.com/python-semantic-release/python-semantic-release/commit/4648d87bac8fb7e6cc361b765b4391b30a8caef8 .. _PR#692: https://github.com/python-semantic-release/python-semantic-release/pull/692 .. _changelog-v8.2.0: v8.2.0 (2023-10-23) =================== ✨ Features ----------- * Allow user customization of release notes template (`PR#736`_, `94a1311`_) 📖 Documentation ---------------- * Add PYTHONPATH mention for commit parser (`3284258`_) .. _3284258: https://github.com/python-semantic-release/python-semantic-release/commit/3284258b9fa1a3fe165f336181aff831d50fddd3 .. _94a1311: https://github.com/python-semantic-release/python-semantic-release/commit/94a131167e1b867f8bc112a042b9766e050ccfd1 .. _PR#736: https://github.com/python-semantic-release/python-semantic-release/pull/736 .. _changelog-v8.1.2: v8.1.2 (2023-10-13) =================== 🪲 Bug Fixes ------------ * Correct lint errors (`a13a6c3`_) * Error when running build command on windows systems (`PR#732`_, `2553657`_) .. _2553657: https://github.com/python-semantic-release/python-semantic-release/commit/25536574760b407410f435441da533fafbf94402 .. _a13a6c3: https://github.com/python-semantic-release/python-semantic-release/commit/a13a6c37e180dc422599939a5725835306c18ff2 .. _PR#732: https://github.com/python-semantic-release/python-semantic-release/pull/732 .. _changelog-v8.1.1: v8.1.1 (2023-09-19) =================== 🪲 Bug Fixes ------------ * Attribute error when logging non-strings (`PR#711`_, `75e6e48`_) .. _75e6e48: https://github.com/python-semantic-release/python-semantic-release/commit/75e6e48129da8238a62d5eccac1ae55d0fee0f9f .. _PR#711: https://github.com/python-semantic-release/python-semantic-release/pull/711 .. 
_changelog-v8.1.0: v8.1.0 (2023-09-19) =================== ✨ Features ----------- * Upgrade pydantic to v2 (`PR#714`_, `5a5c5d0`_) 📖 Documentation ---------------- * Fix typos (`PR#708`_, `2698b0e`_) * Update project urls (`PR#715`_, `5fd5485`_) .. _2698b0e: https://github.com/python-semantic-release/python-semantic-release/commit/2698b0e006ff7e175430b98450ba248ed523b341 .. _5a5c5d0: https://github.com/python-semantic-release/python-semantic-release/commit/5a5c5d0ee347750d7c417c3242d52e8ada50b217 .. _5fd5485: https://github.com/python-semantic-release/python-semantic-release/commit/5fd54856dfb6774feffc40d36d5bb0f421f04842 .. _PR#708: https://github.com/python-semantic-release/python-semantic-release/pull/708 .. _PR#714: https://github.com/python-semantic-release/python-semantic-release/pull/714 .. _PR#715: https://github.com/python-semantic-release/python-semantic-release/pull/715 .. _changelog-v8.0.8: v8.0.8 (2023-08-26) =================== 🪲 Bug Fixes ------------ * Dynamic_import() import path split (`PR#686`_, `1007a06`_) .. _1007a06: https://github.com/python-semantic-release/python-semantic-release/commit/1007a06d1e16beef6d18f44ff2e0e09921854b54 .. _PR#686: https://github.com/python-semantic-release/python-semantic-release/pull/686 .. _changelog-v8.0.7: v8.0.7 (2023-08-16) =================== 🪲 Bug Fixes ------------ * Use correct upload url for github (`PR#661`_, `8a515ca`_) .. _8a515ca: https://github.com/python-semantic-release/python-semantic-release/commit/8a515caf1f993aa653e024beda2fdb9e629cc42a .. _PR#661: https://github.com/python-semantic-release/python-semantic-release/pull/661 .. _changelog-v8.0.6: v8.0.6 (2023-08-13) =================== 🪲 Bug Fixes ------------ * **publish**: Improve error message when no tags found (`PR#683`_, `bdc06ea`_) .. _bdc06ea: https://github.com/python-semantic-release/python-semantic-release/commit/bdc06ea061c19134d5d74bd9f168700dd5d9bcf5 .. 
_PR#683: https://github.com/python-semantic-release/python-semantic-release/pull/683 .. _changelog-v8.0.5: v8.0.5 (2023-08-10) =================== 🪲 Bug Fixes ------------ * Don't warn about vcs token if ignore_token_for_push is true. (`PR#670`_, `f1a54a6`_) 📖 Documentation ---------------- * ``password`` should be ``token``. (`PR#670`_, `f1a54a6`_) * Fix typo missing 's' in version_variable[s] in configuration.rst (`PR#668`_, `879186a`_) .. _879186a: https://github.com/python-semantic-release/python-semantic-release/commit/879186aa09a3bea8bbe2b472f892cf7c0712e557 .. _f1a54a6: https://github.com/python-semantic-release/python-semantic-release/commit/f1a54a6c9a05b225b6474d50cd610eca19ec0c34 .. _PR#668: https://github.com/python-semantic-release/python-semantic-release/pull/668 .. _PR#670: https://github.com/python-semantic-release/python-semantic-release/pull/670 .. _changelog-v8.0.4: v8.0.4 (2023-07-26) =================== 🪲 Bug Fixes ------------ * **changelog**: Use version as semver tag by default (`PR#653`_, `5984c77`_) 📖 Documentation ---------------- * Add Python 3.11 to classifiers in metadata (`PR#651`_, `5a32a24`_) * Clarify usage of assets config option (`PR#655`_, `efa2b30`_) .. _5984c77: https://github.com/python-semantic-release/python-semantic-release/commit/5984c7771edc37f0d7d57894adecc2591efc414d .. _5a32a24: https://github.com/python-semantic-release/python-semantic-release/commit/5a32a24bf4128c39903f0c5d3bd0cb1ccba57e18 .. _efa2b30: https://github.com/python-semantic-release/python-semantic-release/commit/efa2b3019b41eb427f0e1c8faa21ad10664295d0 .. _PR#651: https://github.com/python-semantic-release/python-semantic-release/pull/651 .. _PR#653: https://github.com/python-semantic-release/python-semantic-release/pull/653 .. _PR#655: https://github.com/python-semantic-release/python-semantic-release/pull/655 .. 
_changelog-v8.0.3: v8.0.3 (2023-07-21) =================== 🪲 Bug Fixes ------------ * Skip non-parsable versions when calculating next version (`PR#649`_, `88f25ea`_) .. _88f25ea: https://github.com/python-semantic-release/python-semantic-release/commit/88f25eae62589cdf53dbc3dfcb167a3ae6cba2d3 .. _PR#649: https://github.com/python-semantic-release/python-semantic-release/pull/649 .. _changelog-v8.0.2: v8.0.2 (2023-07-18) =================== 🪲 Bug Fixes ------------ * Handle missing configuration (`PR#644`_, `f15753c`_) 📖 Documentation ---------------- * Better description for tag_format usage (`2129b72`_) * Clarify v8 breaking changes in GitHub action inputs (`PR#643`_, `cda050c`_) * Correct version_toml example in migrating_from_v7.rst (`PR#641`_, `325d5e0`_) .. _2129b72: https://github.com/python-semantic-release/python-semantic-release/commit/2129b729837eccc41a33dbb49785a8a30ce6b187 .. _325d5e0: https://github.com/python-semantic-release/python-semantic-release/commit/325d5e048bd89cb2a94c47029d4878b27311c0f0 .. _cda050c: https://github.com/python-semantic-release/python-semantic-release/commit/cda050cd9e789d81458157ee240ff99ec65c6f25 .. _f15753c: https://github.com/python-semantic-release/python-semantic-release/commit/f15753ce652f36cc03b108c667a26ab74bcbf95d .. _PR#641: https://github.com/python-semantic-release/python-semantic-release/pull/641 .. _PR#643: https://github.com/python-semantic-release/python-semantic-release/pull/643 .. _PR#644: https://github.com/python-semantic-release/python-semantic-release/pull/644 .. _changelog-v8.0.1: v8.0.1 (2023-07-17) =================== 🪲 Bug Fixes ------------ * Invalid version in Git history should not cause a release failure (`PR#632`_, `254430b`_) 📖 Documentation ---------------- * Reduce readthedocs formats and add entries to migration from v7 guide (`9b6ddfe`_) * **migration**: Fix hyperlink (`PR#631`_, `5fbd52d`_) .. 
_254430b: https://github.com/python-semantic-release/python-semantic-release/commit/254430b5cc5f032016b4c73168f0403c4d87541e .. _5fbd52d: https://github.com/python-semantic-release/python-semantic-release/commit/5fbd52d7de4982b5689651201a0e07b445158645 .. _9b6ddfe: https://github.com/python-semantic-release/python-semantic-release/commit/9b6ddfef448f9de30fa2845034f76655d34a9912 .. _PR#631: https://github.com/python-semantic-release/python-semantic-release/pull/631 .. _PR#632: https://github.com/python-semantic-release/python-semantic-release/pull/632 .. _changelog-v8.0.0: v8.0.0 (2023-07-16) =================== ✨ Features ----------- * **publish-cmd**: Add ``--post-to-release-tag`` option to control where to publish (`PR#619`_, `ec30564`_) * Make it easier to access commit messages in ParsedCommits (`PR#619`_, `ec30564`_) * Remove publication of ``dists/`` to artifact repository (`PR#619`_, `ec30564`_) * Rename 'upload' configuration section to 'publish' (`PR#619`_, `ec30564`_) * **github-action**: Add GitHub Actions output variables (`PR#619`_, `ec30564`_) * **version-cmd**: Add ``--skip-build`` option (`PR#619`_, `ec30564`_) * **version-cmd**: Add ``--strict`` version mode (`PR#619`_, `ec30564`_) 🪲 Bug Fixes ------------ * Add logging for token auth, use token for push (`PR#619`_, `ec30564`_) * Caching for repo owner and name (`PR#619`_, `ec30564`_) * Correct assets type in configuration (`PR#619`_, `ec30564`_) * Correct assets type-annotation for RuntimeContext (`PR#619`_, `ec30564`_) * Correct Dockerfile CLI command and GHA fetch (`PR#619`_, `ec30564`_) * Correct handling of build commands (`PR#619`_, `ec30564`_) * Correct logic for generating release notes (`PR#619`_, `ec30564`_) * Create_or_update_release for Gitlab hvcs (`PR#619`_, `ec30564`_) * Make additional attributes available for template authors (`PR#619`_, `ec30564`_) * Only call Github Action output callback once defaults are set (`PR#619`_, `ec30564`_) * Remove commit amending behavior (`PR#619`_, 
`ec30564`_) * Resolve branch checkout logic in GHA (`PR#619`_, `ec30564`_) * Resolve bug in changelog logic, enable upload to pypi (`PR#619`_, `ec30564`_) * Resolve loss of tag_format configuration (`PR#619`_, `ec30564`_) * **github-action**: Pin Debian version in Dockerfile (`PR#619`_, `ec30564`_) * **github-action**: Correct input parsing (`PR#619`_, `ec30564`_) * **github-action**: Mark container fs as safe for git to operate on (`PR#619`_, `ec30564`_) * **github-action**: Quotation for git config command (`PR#619`_, `ec30564`_) * **github-action**: Remove default for 'force' (`PR#619`_, `ec30564`_) 📖 Documentation ---------------- * Convert to Furo theme (`PR#619`_, `ec30564`_) * Fix typo (`PR#619`_, `ec30564`_) * Remove reference to dist publication (`PR#619`_, `ec30564`_) * Update docs with additional required permissions (`PR#619`_, `ec30564`_) * **changelog-templates**: Fix typo (`PR#619`_, `ec30564`_) ♻️ Refactoring --------------- * Remove verify-ci command (`PR#619`_, `ec30564`_) 💥 BREAKING CHANGES -------------------- * numerous breaking changes, see :ref:`migrating-from-v7` for more information .. _ec30564: https://github.com/python-semantic-release/python-semantic-release/commit/ec30564b4ec732c001d76d3c09ba033066d2b6fe .. _PR#619: https://github.com/python-semantic-release/python-semantic-release/pull/619 .. _changelog-v7.34.6: v7.34.6 (2023-06-17) ==================== 🪲 Bug Fixes ------------ * Relax invoke dependency constraint (`18ea200`_) .. _18ea200: https://github.com/python-semantic-release/python-semantic-release/commit/18ea200633fd67e07f3d4121df5aa4c6dd29d154 .. _changelog-v7.34.5: v7.34.5 (2023-06-17) ==================== 🪲 Bug Fixes ------------ * Consider empty commits (`PR#608`_, `6f2e890`_) .. _6f2e890: https://github.com/python-semantic-release/python-semantic-release/commit/6f2e8909636595d3cb5e858f42c63820cda45974 .. _PR#608: https://github.com/python-semantic-release/python-semantic-release/pull/608 .. 
_changelog-v7.34.4: v7.34.4 (2023-06-15) ==================== 🪲 Bug Fixes ------------ * Docker build fails installing git (`PR#605`_, `9e3eb97`_) .. _9e3eb97: https://github.com/python-semantic-release/python-semantic-release/commit/9e3eb979783bc39ca564c2967c6c77eecba682e6 .. _PR#605: https://github.com/python-semantic-release/python-semantic-release/pull/605 .. _changelog-v7.34.3: v7.34.3 (2023-06-01) ==================== 🪲 Bug Fixes ------------ * Generate markdown linter compliant changelog headers & lists (`PR#597`_, `cc87400`_) .. _cc87400: https://github.com/python-semantic-release/python-semantic-release/commit/cc87400d4a823350de7d02dc3172d2488c9517db .. _PR#597: https://github.com/python-semantic-release/python-semantic-release/pull/597 .. _changelog-v7.34.2: v7.34.2 (2023-05-29) ==================== 🪲 Bug Fixes ------------ * Open all files with explicit utf-8 encoding (`PR#596`_, `cb71f35`_) .. _cb71f35: https://github.com/python-semantic-release/python-semantic-release/commit/cb71f35c26c1655e675fa735fa880d39a2c8af9c .. _PR#596: https://github.com/python-semantic-release/python-semantic-release/pull/596 .. _changelog-v7.34.1: v7.34.1 (2023-05-28) ==================== 🪲 Bug Fixes ------------ * Generate markdown linter compliant changelog headers & lists (`PR#594`_, `9d9d403`_) .. _9d9d403: https://github.com/python-semantic-release/python-semantic-release/commit/9d9d40305c499c907335abe313e3ed122db0b154 .. _PR#594: https://github.com/python-semantic-release/python-semantic-release/pull/594 .. _changelog-v7.34.0: v7.34.0 (2023-05-28) ==================== ✨ Features ----------- * Add option to only parse commits for current working directory (`PR#509`_, `cdf8116`_) .. _cdf8116: https://github.com/python-semantic-release/python-semantic-release/commit/cdf8116c1e415363b10a01f541873e04ad874220 .. _PR#509: https://github.com/python-semantic-release/python-semantic-release/pull/509 .. 
_changelog-v7.33.5: v7.33.5 (2023-05-19) ==================== 🪲 Bug Fixes ------------ * Update docs and default config for gitmoji changes (`PR#590`_, `192da6e`_) * Update sphinx dep (`PR#590`_, `192da6e`_) 📖 Documentation ---------------- * Update broken badge and add links (`PR#591`_, `0c23447`_) .. _0c23447: https://github.com/python-semantic-release/python-semantic-release/commit/0c234475d27ad887b19170c82deb80293b3a95f1 .. _192da6e: https://github.com/python-semantic-release/python-semantic-release/commit/192da6e1352298b48630423d50191070a1c5ab24 .. _PR#590: https://github.com/python-semantic-release/python-semantic-release/pull/590 .. _PR#591: https://github.com/python-semantic-release/python-semantic-release/pull/591 .. _changelog-v7.33.4: v7.33.4 (2023-05-14) ==================== 🪲 Bug Fixes ------------ * If prerelease, publish prerelease (`PR#587`_, `927da9f`_) .. _927da9f: https://github.com/python-semantic-release/python-semantic-release/commit/927da9f8feb881e02bc08b33dc559bd8e7fc41ab .. _PR#587: https://github.com/python-semantic-release/python-semantic-release/pull/587 .. _changelog-v7.33.3: v7.33.3 (2023-04-24) ==================== 🪲 Bug Fixes ------------ * Trim emojis from config (`PR#583`_, `02902f7`_) * Update Gitmojis according to official node module (`PR#582`_, `806fcfa`_) 📖 Documentation ---------------- * Grammar in ``docs/troubleshooting.rst`` (`PR#557`_, `bbe754a`_) * Spelling and grammar in ``travis.rst`` (`PR#556`_, `3a76e9d`_) * Update repository name (`PR#559`_, `5cdb05e`_) .. _02902f7: https://github.com/python-semantic-release/python-semantic-release/commit/02902f73ee961565c2470c000f00947d9ef06cb1 .. _3a76e9d: https://github.com/python-semantic-release/python-semantic-release/commit/3a76e9d7505c421009eb3e953c32cccac2e70e07 .. _5cdb05e: https://github.com/python-semantic-release/python-semantic-release/commit/5cdb05e20f17b12890e1487c42d317dcbadd06c8 .. 
_806fcfa: https://github.com/python-semantic-release/python-semantic-release/commit/806fcfa4cfdd3df4b380afd015a68dc90d54215a .. _bbe754a: https://github.com/python-semantic-release/python-semantic-release/commit/bbe754a3db9ce7132749e7902fe118b52f48ee42 .. _PR#556: https://github.com/python-semantic-release/python-semantic-release/pull/556 .. _PR#557: https://github.com/python-semantic-release/python-semantic-release/pull/557 .. _PR#559: https://github.com/python-semantic-release/python-semantic-release/pull/559 .. _PR#582: https://github.com/python-semantic-release/python-semantic-release/pull/582 .. _PR#583: https://github.com/python-semantic-release/python-semantic-release/pull/583 .. _changelog-v7.33.2: v7.33.2 (2023-02-17) ==================== 🪲 Bug Fixes ------------ * Inconsistent versioning between print-version and publish (`PR#524`_, `17d60e9`_) .. _17d60e9: https://github.com/python-semantic-release/python-semantic-release/commit/17d60e9bf66f62e5845065486c9d5e450f74839a .. _PR#524: https://github.com/python-semantic-release/python-semantic-release/pull/524 .. _changelog-v7.33.1: v7.33.1 (2023-02-01) ==================== 🪲 Bug Fixes ------------ * **action**: Mark container fs as safe for git (`PR#552`_, `2a55f68`_) .. _2a55f68: https://github.com/python-semantic-release/python-semantic-release/commit/2a55f68e2b3cb9ffa9204c00ddbf12706af5c070 .. _PR#552: https://github.com/python-semantic-release/python-semantic-release/pull/552 .. 
_changelog-v7.33.0: v7.33.0 (2023-01-15) ==================== ✨ Features ----------- * Add signing options to action (`31ad5eb`_) * Update action with configuration options (`PR#518`_, `4664afe`_) * **repository**: Add support for TWINE_CERT, closes `#521`_ (`PR#522`_, `d56e85d`_) 🪲 Bug Fixes ------------ * Changelog release commit search logic (`PR#530`_, `efb3410`_) * **github-actions**: Bump Dockerfile to use Python 3.10 image, closes `#533`_ (`PR#536`_, `8f2185d`_) * **action**: Fix environment variable names (`3c66218`_) 📖 Documentation ---------------- * Update documentation (`5cbdad2`_) .. _#521: https://github.com/python-semantic-release/python-semantic-release/issues/521 .. _#533: https://github.com/python-semantic-release/python-semantic-release/issues/533 .. _31ad5eb: https://github.com/python-semantic-release/python-semantic-release/commit/31ad5eb5a25f0ea703afc295351104aefd66cac1 .. _3c66218: https://github.com/python-semantic-release/python-semantic-release/commit/3c66218640044adf263fcf9b2714cfc4b99c2e90 .. _4664afe: https://github.com/python-semantic-release/python-semantic-release/commit/4664afe5f80a04834e398fefb841b166a51d95b7 .. _5cbdad2: https://github.com/python-semantic-release/python-semantic-release/commit/5cbdad296034a792c9bf05e3700eac4f847eb469 .. _8f2185d: https://github.com/python-semantic-release/python-semantic-release/commit/8f2185d570b3966b667ac591ae523812e9d2e00f .. _d56e85d: https://github.com/python-semantic-release/python-semantic-release/commit/d56e85d1f2ac66fb0b59af2178164ca915dbe163 .. _efb3410: https://github.com/python-semantic-release/python-semantic-release/commit/efb341036196c39b4694ca4bfa56c6b3e0827c6c .. _PR#518: https://github.com/python-semantic-release/python-semantic-release/pull/518 .. _PR#522: https://github.com/python-semantic-release/python-semantic-release/pull/522 .. _PR#530: https://github.com/python-semantic-release/python-semantic-release/pull/530 .. 
_PR#536: https://github.com/python-semantic-release/python-semantic-release/pull/536 .. _PR#541: https://github.com/python-semantic-release/python-semantic-release/pull/541 .. _changelog-v7.32.2: v7.32.2 (2022-10-22) ==================== 🪲 Bug Fixes ------------ * Fix changelog generation in tag-mode (`PR#171`_, `482a62e`_) 📖 Documentation ---------------- * Fix code blocks (`PR#506`_, `24b7673`_) .. _24b7673: https://github.com/python-semantic-release/python-semantic-release/commit/24b767339fcef1c843f7dd3188900adab05e03b1 .. _482a62e: https://github.com/python-semantic-release/python-semantic-release/commit/482a62ec374208b2d57675cb0b7f0ab9695849b9 .. _PR#171: https://github.com/python-semantic-release/python-semantic-release/pull/171 .. _PR#506: https://github.com/python-semantic-release/python-semantic-release/pull/506 .. _changelog-v7.32.1: v7.32.1 (2022-10-07) ==================== 🪲 Bug Fixes ------------ * Corrections for deprecation warnings (`PR#505`_, `d47afb6`_) 📖 Documentation ---------------- * Correct spelling mistakes (`PR#504`_, `3717e0d`_) .. _3717e0d: https://github.com/python-semantic-release/python-semantic-release/commit/3717e0d8810f5d683847c7b0e335eeefebbf2921 .. _d47afb6: https://github.com/python-semantic-release/python-semantic-release/commit/d47afb6516238939e174f946977bf4880062a622 .. _PR#504: https://github.com/python-semantic-release/python-semantic-release/pull/504 .. _PR#505: https://github.com/python-semantic-release/python-semantic-release/pull/505 .. _changelog-v7.32.0: v7.32.0 (2022-09-25) ==================== ✨ Features ----------- * Add setting for enforcing textual changelog sections, closes `#498`_ (`PR#502`_, `988437d`_) 📖 Documentation ---------------- * Correct documented default behavior for ``commit_version_number`` (`PR#497`_, `ffae2dc`_) .. _#498: https://github.com/python-semantic-release/python-semantic-release/issues/498 .. 
_988437d: https://github.com/python-semantic-release/python-semantic-release/commit/988437d21e40d3e3b1c95ed66b535bdd523210de .. _ffae2dc: https://github.com/python-semantic-release/python-semantic-release/commit/ffae2dc68f7f4bc13c5fd015acd43b457e568ada .. _PR#497: https://github.com/python-semantic-release/python-semantic-release/pull/497 .. _PR#502: https://github.com/python-semantic-release/python-semantic-release/pull/502 .. _changelog-v7.31.4: v7.31.4 (2022-08-23) ==================== 🪲 Bug Fixes ------------ * Account for trailing newlines in commit messages, closes `#490`_ (`PR#495`_, `111b151`_) .. _#490: https://github.com/python-semantic-release/python-semantic-release/issues/490 .. _111b151: https://github.com/python-semantic-release/python-semantic-release/commit/111b1518e8c8e2bd7535bd4c4b126548da384605 .. _PR#495: https://github.com/python-semantic-release/python-semantic-release/pull/495 .. _changelog-v7.31.3: v7.31.3 (2022-08-22) ==================== 🪲 Bug Fixes ------------ * Use ``commit_subject`` when searching for release commits (`PR#488`_, `3849ed9`_) .. _3849ed9: https://github.com/python-semantic-release/python-semantic-release/commit/3849ed992c3cff9054b8690bcf59e49768f84f47 .. _PR#488: https://github.com/python-semantic-release/python-semantic-release/pull/488 .. _changelog-v7.31.2: v7.31.2 (2022-07-29) ==================== 🪲 Bug Fixes ------------ * Add better handling of missing changelog placeholder, closes `#454`_ (`e7a0e81`_) * Add repo=None when not in git repo, closes `#422`_ (`40be804`_) 📖 Documentation ---------------- * Add example for pyproject.toml (`2a4b8af`_) .. _#422: https://github.com/python-semantic-release/python-semantic-release/issues/422 .. _#454: https://github.com/python-semantic-release/python-semantic-release/issues/454 .. _2a4b8af: https://github.com/python-semantic-release/python-semantic-release/commit/2a4b8af1c2893a769c02476bb92f760c8522bd7a .. 
_40be804: https://github.com/python-semantic-release/python-semantic-release/commit/40be804c09ab8a036fb135c9c38a63f206d2742c .. _e7a0e81: https://github.com/python-semantic-release/python-semantic-release/commit/e7a0e81c004ade73ed927ba4de8c3e3ccaf0047c .. _changelog-v7.31.1: v7.31.1 (2022-07-29) ==================== 🪲 Bug Fixes ------------ * Update git email in action, closes `#473`_ (`0ece6f2`_) .. _#473: https://github.com/python-semantic-release/python-semantic-release/issues/473 .. _0ece6f2: https://github.com/python-semantic-release/python-semantic-release/commit/0ece6f263ff02a17bb1e00e7ed21c490f72e3d00 .. _changelog-v7.31.0: v7.31.0 (2022-07-29) ==================== ✨ Features ----------- * Add prerelease-patch and no-prerelease-patch flags for whether to auto-bump prereleases (`b4e5b62`_) * Override repository_url w REPOSITORY_URL env var (`PR#439`_, `cb7578c`_) 🪲 Bug Fixes ------------ * :bug: fix get_current_release_version for tag_only version_source (`cad09be`_) .. _b4e5b62: https://github.com/python-semantic-release/python-semantic-release/commit/b4e5b626074f969e4140c75fdac837a0625cfbf6 .. _cad09be: https://github.com/python-semantic-release/python-semantic-release/commit/cad09be9ba067f1c882379c0f4b28115a287fc2b .. _cb7578c: https://github.com/python-semantic-release/python-semantic-release/commit/cb7578cf005b8bd65d9b988f6f773e4c060982e3 .. _PR#439: https://github.com/python-semantic-release/python-semantic-release/pull/439 .. _changelog-v7.30.2: v7.30.2 (2022-07-26) ==================== 🪲 Bug Fixes ------------ * Declare additional_options as action inputs (`PR#481`_, `cb5d8c7`_) .. _cb5d8c7: https://github.com/python-semantic-release/python-semantic-release/commit/cb5d8c7ce7d013fcfabd7696b5ffb846a8a6f853 .. _PR#481: https://github.com/python-semantic-release/python-semantic-release/pull/481 .. 
_changelog-v7.30.1: v7.30.1 (2022-07-25) ==================== 🪲 Bug Fixes ------------ * Don't use commit_subject for tag pattern matching (`PR#480`_, `ac3f11e`_) .. _ac3f11e: https://github.com/python-semantic-release/python-semantic-release/commit/ac3f11e689f4a290d20b68b9c5c214098eb61b5f .. _PR#480: https://github.com/python-semantic-release/python-semantic-release/pull/480 .. _changelog-v7.30.0: v7.30.0 (2022-07-25) ==================== ✨ Features ----------- * Add ``additional_options`` input for GitHub Action (`PR#477`_, `aea60e3`_) 🪲 Bug Fixes ------------ * Allow empty additional options (`PR#479`_, `c9b2514`_) .. _aea60e3: https://github.com/python-semantic-release/python-semantic-release/commit/aea60e3d290c6fe3137bff21e0db1ed936233776 .. _c9b2514: https://github.com/python-semantic-release/python-semantic-release/commit/c9b2514d3e164b20e78b33f60989d78c2587e1df .. _PR#477: https://github.com/python-semantic-release/python-semantic-release/pull/477 .. _PR#479: https://github.com/python-semantic-release/python-semantic-release/pull/479 .. _changelog-v7.29.7: v7.29.7 (2022-07-24) ==================== 🪲 Bug Fixes ------------ * Ignore dependency version bumps when parsing version from commit logs (`PR#476`_, `51bcb78`_) .. _51bcb78: https://github.com/python-semantic-release/python-semantic-release/commit/51bcb780a9f55fadfaf01612ff65c1f92642c2c1 .. _PR#476: https://github.com/python-semantic-release/python-semantic-release/pull/476 .. _changelog-v7.29.6: v7.29.6 (2022-07-15) ==================== 🪲 Bug Fixes ------------ * Allow changing prerelease tag using CLI flags (`PR#466`_, `395bf4f`_) .. _395bf4f: https://github.com/python-semantic-release/python-semantic-release/commit/395bf4f2de73663c070f37cced85162d41934213 .. _PR#466: https://github.com/python-semantic-release/python-semantic-release/pull/466 .. 
_changelog-v7.29.5: v7.29.5 (2022-07-14) ==================== 🪲 Bug Fixes ------------ * Add packaging module requirement (`PR#469`_, `b99c9fa`_) * **publish**: Get version bump for current release (`PR#467`_, `dd26888`_) .. _b99c9fa: https://github.com/python-semantic-release/python-semantic-release/commit/b99c9fa88dc25e5ceacb131cd93d9079c4fb2c86 .. _dd26888: https://github.com/python-semantic-release/python-semantic-release/commit/dd26888a923b2f480303c19f1916647de48b02bf .. _PR#467: https://github.com/python-semantic-release/python-semantic-release/pull/467 .. _PR#469: https://github.com/python-semantic-release/python-semantic-release/pull/469 .. _changelog-v7.29.4: v7.29.4 (2022-06-29) ==================== 🪲 Bug Fixes ------------ * Add text for empty ValueError (`PR#461`_, `733254a`_) .. _733254a: https://github.com/python-semantic-release/python-semantic-release/commit/733254a99320d8c2f964d799ac4ec29737867faa .. _PR#461: https://github.com/python-semantic-release/python-semantic-release/pull/461 .. _changelog-v7.29.3: v7.29.3 (2022-06-26) ==================== 🪲 Bug Fixes ------------ * Ensure that assets can be uploaded successfully on custom GitHub servers (`PR#458`_, `32b516d`_) .. _32b516d: https://github.com/python-semantic-release/python-semantic-release/commit/32b516d7aded4afcafe4aa56d6a5a329b3fc371d .. _PR#458: https://github.com/python-semantic-release/python-semantic-release/pull/458 .. _changelog-v7.29.2: v7.29.2 (2022-06-20) ==================== 🪲 Bug Fixes ------------ * Ensure should_bump checks against release version if not prerelease (`PR#457`_, `da0606f`_) .. _da0606f: https://github.com/python-semantic-release/python-semantic-release/commit/da0606f0d67ada5f097c704b9423ead3b5aca6b2 .. _PR#457: https://github.com/python-semantic-release/python-semantic-release/pull/457 .. 
_changelog-v7.29.1: v7.29.1 (2022-06-01) ==================== 🪲 Bug Fixes ------------ * Capture correct release version when patch has more than one digit (`PR#448`_, `426cdc7`_) .. _426cdc7: https://github.com/python-semantic-release/python-semantic-release/commit/426cdc7d7e0140da67f33b6853af71b2295aaac2 .. _PR#448: https://github.com/python-semantic-release/python-semantic-release/pull/448 .. _changelog-v7.29.0: v7.29.0 (2022-05-27) ==================== ✨ Features ----------- * Allow using ssh-key to push version while using token to publish to hvcs (`PR#419`_, `7b2dffa`_) * **config**: Add ignore_token_for_push param (`PR#419`_, `7b2dffa`_) 🪲 Bug Fixes ------------ * Fix and refactor prerelease (`PR#435`_, `94c9494`_) * **test**: Override GITHUB_ACTOR env (`PR#419`_, `7b2dffa`_) 📖 Documentation ---------------- * Add documentation for ignore_token_for_push (`PR#419`_, `7b2dffa`_) .. _7b2dffa: https://github.com/python-semantic-release/python-semantic-release/commit/7b2dffadf43c77d5e0eea307aefcee5c7744df5c .. _94c9494: https://github.com/python-semantic-release/python-semantic-release/commit/94c94942561f85f48433c95fd3467e03e0893ab4 .. _PR#419: https://github.com/python-semantic-release/python-semantic-release/pull/419 .. _PR#435: https://github.com/python-semantic-release/python-semantic-release/pull/435 .. _changelog-v7.28.1: v7.28.1 (2022-04-14) ==================== 🪲 Bug Fixes ------------ * Fix getting current version when ``version_source=tag_only`` (`PR#437`_, `b247936`_) .. _b247936: https://github.com/python-semantic-release/python-semantic-release/commit/b247936a81c0d859a34bf9f17ab8ca6a80488081 .. _PR#437: https://github.com/python-semantic-release/python-semantic-release/pull/437 .. _changelog-v7.28.0: v7.28.0 (2022-04-11) ==================== ✨ Features ----------- * Add ``tag_only`` option for ``version_source``, closes `#354`_ (`PR#436`_, `cf74339`_) .. _#354: https://github.com/python-semantic-release/python-semantic-release/issues/354 .. 
_cf74339: https://github.com/python-semantic-release/python-semantic-release/commit/cf743395456a86c62679c2c0342502af043bfc3b .. _PR#436: https://github.com/python-semantic-release/python-semantic-release/pull/436 .. _changelog-v7.27.1: v7.27.1 (2022-04-03) ==================== 🪲 Bug Fixes ------------ * **prerelease**: Pass prerelease option to get_current_version (`PR#432`_, `aabab0b`_) .. _aabab0b: https://github.com/python-semantic-release/python-semantic-release/commit/aabab0b7ce647d25e0c78ae6566f1132ece9fcb9 .. _PR#432: https://github.com/python-semantic-release/python-semantic-release/pull/432 .. _changelog-v7.27.0: v7.27.0 (2022-03-15) ==================== ✨ Features ----------- * Add git-lfs to docker container (`PR#427`_, `184e365`_) .. _184e365: https://github.com/python-semantic-release/python-semantic-release/commit/184e3653932979b82e5a62b497f2a46cbe15ba87 .. _PR#427: https://github.com/python-semantic-release/python-semantic-release/pull/427 .. _changelog-v7.26.0: v7.26.0 (2022-03-07) ==================== ✨ Features ----------- * **publish-cmd**: add ``--prerelease`` cli flag to enable prerelease versioning (`PR#413`_, `7064265`_) * **version-cmd**: add ``--prerelease`` cli flag to enable prerelease versioning (`PR#413`_, `7064265`_) 📖 Documentation ---------------- * Added basic info about prerelease versioning (`PR#413`_, `7064265`_) .. _7064265: https://github.com/python-semantic-release/python-semantic-release/commit/7064265627a2aba09caa2873d823b594e0e23e77 .. _PR#413: https://github.com/python-semantic-release/python-semantic-release/pull/413 .. _changelog-v7.25.2: v7.25.2 (2022-02-24) ==================== 🪲 Bug Fixes ------------ * **gitea**: Use form-data from asset upload (`PR#421`_, `e011944`_) .. _e011944: https://github.com/python-semantic-release/python-semantic-release/commit/e011944987885f75b80fe16a363f4befb2519a91 .. _PR#421: https://github.com/python-semantic-release/python-semantic-release/pull/421 .. 
_changelog-v7.25.1: v7.25.1 (2022-02-23) ==================== 🪲 Bug Fixes ------------ * **gitea**: Build status and asset upload (`PR#420`_, `57db81f`_) * **gitea**: Handle list build status response (`PR#420`_, `57db81f`_) .. _57db81f: https://github.com/python-semantic-release/python-semantic-release/commit/57db81f4c6b96da8259e3bad9137eaccbcd10f6e .. _PR#420: https://github.com/python-semantic-release/python-semantic-release/pull/420 .. _changelog-v7.25.0: v7.25.0 (2022-02-17) ==================== ✨ Features ----------- * **hvcs**: Add gitea support (`PR#412`_, `b7e7936`_) 📖 Documentation ---------------- * Document tag_commit, closes `#410`_ (`b631ca0`_) .. _#410: https://github.com/python-semantic-release/python-semantic-release/issues/410 .. _b631ca0: https://github.com/python-semantic-release/python-semantic-release/commit/b631ca0a79cb2d5499715d43688fc284cffb3044 .. _b7e7936: https://github.com/python-semantic-release/python-semantic-release/commit/b7e7936331b7939db09abab235c8866d800ddc1a .. _PR#412: https://github.com/python-semantic-release/python-semantic-release/pull/412 .. _changelog-v7.24.0: v7.24.0 (2022-01-24) ==================== ✨ Features ----------- * Include additional changes in release commits (`3e34f95`_) .. _3e34f95: https://github.com/python-semantic-release/python-semantic-release/commit/3e34f957ff5a3ec6e6f984cc4a79a38ce4391ea9 .. _changelog-v7.23.0: v7.23.0 (2021-11-30) ==================== ✨ Features ----------- * Support Github Enterprise server (`b4e01f1`_) .. _b4e01f1: https://github.com/python-semantic-release/python-semantic-release/commit/b4e01f1b7e841263fa84f57f0ac331f7c0b31954 .. _changelog-v7.22.0: v7.22.0 (2021-11-21) ==================== ✨ Features ----------- * **parser_angular**: Allow customization in parser (`298eebb`_) 🪲 Bug Fixes ------------ * Address PR feedback for ``parser_angular.py`` (`f7bc458`_) .. 
_298eebb: https://github.com/python-semantic-release/python-semantic-release/commit/298eebbfab5c083505036ba1df47a5874a1eed6e .. _f7bc458: https://github.com/python-semantic-release/python-semantic-release/commit/f7bc45841e6a5c762f99f936c292cee25fabcd02 .. _changelog-v7.21.0: v7.21.0 (2021-11-21) ==================== ✨ Features ----------- * Use gitlab-ci or github actions env vars, closes `#363`_ (`8ca8dd4`_) 🪲 Bug Fixes ------------ * Remove invalid repository exception (`746b62d`_) .. _#363: https://github.com/python-semantic-release/python-semantic-release/issues/363 .. _746b62d: https://github.com/python-semantic-release/python-semantic-release/commit/746b62d4e207a5d491eecd4ca96d096eb22e3bed .. _8ca8dd4: https://github.com/python-semantic-release/python-semantic-release/commit/8ca8dd40f742f823af147928bd75a9577c50d0fd .. _changelog-v7.20.0: v7.20.0 (2021-11-21) ==================== ✨ Features ----------- * Allow custom environment variable names (`PR#392`_, `372cda3`_) * Rewrite Twine adapter for uploading to artifact repositories (`cfb20af`_) 🪲 Bug Fixes ------------ * Don't use linux commands on windows (`PR#393`_, `5bcccd2`_) * Mypy errors in vcs_helpers (`13ca0fe`_) * Skip removing the build folder if it doesn't exist (`8e79fdc`_) 📖 Documentation ---------------- * Clean typos and add section for repository upload (`1efa18a`_) .. _13ca0fe: https://github.com/python-semantic-release/python-semantic-release/commit/13ca0fe650125be2f5e953f6193fdc4d44d3c75a .. _1efa18a: https://github.com/python-semantic-release/python-semantic-release/commit/1efa18a3a55134d6bc6e4572ab025e24082476cd .. _372cda3: https://github.com/python-semantic-release/python-semantic-release/commit/372cda3497f16ead2209e6e1377d38f497144883 .. _5bcccd2: https://github.com/python-semantic-release/python-semantic-release/commit/5bcccd21cc8be3289db260e645fec8dc6a592abd .. _8e79fdc: https://github.com/python-semantic-release/python-semantic-release/commit/8e79fdc107ffd852a91dfb5473e7bd1dfaba4ee5 .. 
_cfb20af: https://github.com/python-semantic-release/python-semantic-release/commit/cfb20af79a8e25a77aee9ff72deedcd63cb7f62f .. _PR#392: https://github.com/python-semantic-release/python-semantic-release/pull/392 .. _PR#393: https://github.com/python-semantic-release/python-semantic-release/pull/393 .. _changelog-v7.19.2: v7.19.2 (2021-09-04) ==================== 🪲 Bug Fixes ------------ * Fixed ImproperConfig import error (`PR#377`_, `b011a95`_) .. _b011a95: https://github.com/python-semantic-release/python-semantic-release/commit/b011a9595df4240cb190bfb1ab5b6d170e430dfc .. _PR#377: https://github.com/python-semantic-release/python-semantic-release/pull/377 .. _changelog-v7.19.1: v7.19.1 (2021-08-17) ==================== 🪲 Bug Fixes ------------ * Add get_formatted_tag helper instead of hardcoded v-prefix in the git tags (`1a354c8`_) .. _1a354c8: https://github.com/python-semantic-release/python-semantic-release/commit/1a354c86abad77563ebce9a6944256461006f3c7 .. _changelog-v7.19.0: v7.19.0 (2021-08-16) ==================== ✨ Features ----------- * Custom git tag format support (`PR#373`_, `1d76632`_) 📖 Documentation ---------------- * **configuration**: define ``tag_format`` usage & resulting effect (`PR#373`_, `1d76632`_) * **parser**: Documentation for scipy-parser (`45ee34a`_) .. _1d76632: https://github.com/python-semantic-release/python-semantic-release/commit/1d76632043bf0b6076d214a63c92013624f4b95e .. _45ee34a: https://github.com/python-semantic-release/python-semantic-release/commit/45ee34aa21443860a6c2cd44a52da2f353b960bf .. _PR#373: https://github.com/python-semantic-release/python-semantic-release/pull/373 .. _changelog-v7.18.0: v7.18.0 (2021-08-09) ==================== ✨ Features ----------- * Add support for non-prefixed tags (`PR#366`_, `0fee4dd`_) 📖 Documentation ---------------- * Clarify second argument of ParsedCommit (`086ddc2`_) .. 
_086ddc2: https://github.com/python-semantic-release/python-semantic-release/commit/086ddc28f06522453328f5ea94c873bd202ff496 .. _0fee4dd: https://github.com/python-semantic-release/python-semantic-release/commit/0fee4ddb5baaddf85ed6b76e76a04474a5f97d0a .. _PR#366: https://github.com/python-semantic-release/python-semantic-release/pull/366 .. _changelog-v7.17.0: v7.17.0 (2021-08-07) ==================== ✨ Features ----------- * **parser**: Add scipy style parser (`PR#369`_, `51a3921`_) .. _51a3921: https://github.com/python-semantic-release/python-semantic-release/commit/51a39213ea120c4bbd7a57b74d4f0cc3103da9f5 .. _PR#369: https://github.com/python-semantic-release/python-semantic-release/pull/369 .. _changelog-v7.16.4: v7.16.4 (2021-08-03) ==================== 🪲 Bug Fixes ------------ * Correct rendering of gitlab issue references, closes `#358`_ (`07429ec`_) .. _#358: https://github.com/python-semantic-release/python-semantic-release/issues/358 .. _07429ec: https://github.com/python-semantic-release/python-semantic-release/commit/07429ec4a32d32069f25ec77b4bea963bd5d2a00 .. _changelog-v7.16.3: v7.16.3 (2021-07-29) ==================== 🪲 Bug Fixes ------------ * Print right info if token is not set, closes `#360`_ (`PR#361`_, `a275a7a`_) .. _#360: https://github.com/python-semantic-release/python-semantic-release/issues/360 .. _a275a7a: https://github.com/python-semantic-release/python-semantic-release/commit/a275a7a17def85ff0b41d254e4ee42772cce1981 .. _PR#361: https://github.com/python-semantic-release/python-semantic-release/pull/361 .. _changelog-v7.16.2: v7.16.2 (2021-06-25) ==================== 🪲 Bug Fixes ------------ * Use release-api for gitlab (`1ef5cab`_) 📖 Documentation ---------------- * Recommend setting a concurrency group for GitHub Actions (`34b0735`_) * Update trove classifiers to reflect supported versions (`PR#344`_, `7578004`_) .. 
_1ef5cab: https://github.com/python-semantic-release/python-semantic-release/commit/1ef5caba2d8dd0f2647bc51ede0ef7152d8b7b8d .. _34b0735: https://github.com/python-semantic-release/python-semantic-release/commit/34b07357ab3f4f4aa787b71183816ec8aaf334a8 .. _7578004: https://github.com/python-semantic-release/python-semantic-release/commit/7578004ed4b20c2bd553782443dfd77535faa377 .. _PR#344: https://github.com/python-semantic-release/python-semantic-release/pull/344 .. _changelog-v7.16.1: v7.16.1 (2021-06-08) ==================== 🪲 Bug Fixes ------------ * Tomlkit should stay at 0.7.0 (`769a5f3`_) .. _769a5f3: https://github.com/python-semantic-release/python-semantic-release/commit/769a5f31115cdb1f43f19a23fe72b96a8c8ba0fc .. _changelog-v7.16.0: v7.16.0 (2021-06-08) ==================== ✨ Features ----------- * Add option to omit tagging (`PR#341`_, `20603e5`_) .. _20603e5: https://github.com/python-semantic-release/python-semantic-release/commit/20603e53116d4f05e822784ce731b42e8cbc5d8f .. _PR#341: https://github.com/python-semantic-release/python-semantic-release/pull/341 .. _changelog-v7.15.6: v7.15.6 (2021-06-08) ==================== 🪲 Bug Fixes ------------ * Update click and tomlkit (`PR#339`_, `947ea3b`_) .. _947ea3b: https://github.com/python-semantic-release/python-semantic-release/commit/947ea3bc0750735941446cf4a87bae20e750ba12 .. _PR#339: https://github.com/python-semantic-release/python-semantic-release/pull/339 .. _changelog-v7.15.5: v7.15.5 (2021-05-26) ==================== 🪲 Bug Fixes ------------ * Pin tomlkit to 0.7.0 (`2cd0db4`_) .. _2cd0db4: https://github.com/python-semantic-release/python-semantic-release/commit/2cd0db4537bb9497b72eb496f6bab003070672ab .. _changelog-v7.15.4: v7.15.4 (2021-04-29) ==================== 🪲 Bug Fixes ------------ * Change log level of failed toml loading, closes `#235`_ (`24bb079`_) .. _#235: https://github.com/python-semantic-release/python-semantic-release/issues/235 .. 
_24bb079: https://github.com/python-semantic-release/python-semantic-release/commit/24bb079cbeff12e7043dd35dd0b5ae03192383bb .. _changelog-v7.15.3: v7.15.3 (2021-04-03) ==================== 🪲 Bug Fixes ------------ * Add venv to path in github action (`583c5a1`_) .. _583c5a1: https://github.com/python-semantic-release/python-semantic-release/commit/583c5a13e40061fc544b82decfe27a6c34f6d265 .. _changelog-v7.15.2: v7.15.2 (2021-04-03) ==================== 🪲 Bug Fixes ------------ * Run semantic-release in virtualenv in the github action, closes `#331`_ (`b508ea9`_) * Set correct path for venv in action script (`aac02b5`_) * Use absolute path for venv in github action (`d4823b3`_) 📖 Documentation ---------------- * Clarify that HVCS should be lowercase, closes `#330`_ (`da0ab0c`_) .. _#330: https://github.com/python-semantic-release/python-semantic-release/issues/330 .. _#331: https://github.com/python-semantic-release/python-semantic-release/issues/331 .. _aac02b5: https://github.com/python-semantic-release/python-semantic-release/commit/aac02b5a44a6959328d5879578aa3536bdf856c2 .. _b508ea9: https://github.com/python-semantic-release/python-semantic-release/commit/b508ea9f411c1cd4f722f929aab9f0efc0890448 .. _d4823b3: https://github.com/python-semantic-release/python-semantic-release/commit/d4823b3b6b1fcd5c33b354f814643c9aaf85a06a .. _da0ab0c: https://github.com/python-semantic-release/python-semantic-release/commit/da0ab0c62c4ce2fa0d815e5558aeec1a1e23bc89 .. _changelog-v7.15.1: v7.15.1 (2021-03-26) ==================== 🪲 Bug Fixes ------------ * Add support for setting build_command to "false", closes `#328`_ (`520cf1e`_) * Upgrade python-gitlab range, closes `#329`_ (`abfacc4`_) 📖 Documentation ---------------- * Add common options to documentation, closes `#327`_ (`20d79a5`_) .. _#327: https://github.com/python-semantic-release/python-semantic-release/issues/327 .. _#328: https://github.com/python-semantic-release/python-semantic-release/issues/328 .. 
_#329: https://github.com/python-semantic-release/python-semantic-release/issues/329 .. _20d79a5: https://github.com/python-semantic-release/python-semantic-release/commit/20d79a51bffa26d40607c1b77d10912992279112 .. _520cf1e: https://github.com/python-semantic-release/python-semantic-release/commit/520cf1eaa7816d0364407dbd17b5bc7c79806086 .. _abfacc4: https://github.com/python-semantic-release/python-semantic-release/commit/abfacc432300941d57488842e41c06d885637e6c .. _changelog-v7.15.0: v7.15.0 (2021-02-18) ==================== ✨ Features ----------- * Allow the use of .pypirc for twine uploads (`PR#325`_, `6bc56b8`_) 📖 Documentation ---------------- * Add documentation for releasing on a Jenkins instance (`PR#324`_, `77ad988`_) .. _6bc56b8: https://github.com/python-semantic-release/python-semantic-release/commit/6bc56b8aa63069a25a828a2d1a9038ecd09b7d5d .. _77ad988: https://github.com/python-semantic-release/python-semantic-release/commit/77ad988a2057be59e4559614a234d6871c06ee37 .. _PR#324: https://github.com/python-semantic-release/python-semantic-release/pull/324 .. _PR#325: https://github.com/python-semantic-release/python-semantic-release/pull/325 .. _changelog-v7.14.0: v7.14.0 (2021-02-11) ==================== ✨ Features ----------- * **checks**: Add support for Jenkins CI (`PR#322`_, `3e99855`_) 📖 Documentation ---------------- * Correct casing on proper nouns (`PR#320`_, `d51b999`_) * Correcting Python casing (`PR#320`_, `d51b999`_) * Correcting Semantic Versioning casing (`PR#320`_, `d51b999`_) .. _3e99855: https://github.com/python-semantic-release/python-semantic-release/commit/3e99855c6bc72b3e9a572c58cc14e82ddeebfff8 .. _d51b999: https://github.com/python-semantic-release/python-semantic-release/commit/d51b999a245a4e56ff7a09d0495c75336f2f150d .. _PR#320: https://github.com/python-semantic-release/python-semantic-release/pull/320 .. _PR#322: https://github.com/python-semantic-release/python-semantic-release/pull/322 .. 
_changelog-v7.13.2: v7.13.2 (2021-01-29) ==================== 🪲 Bug Fixes ------------ * Crash when TOML has no PSR section (`PR#319`_, `5f8ab99`_) * Fix crash when TOML has no PSR section (`PR#319`_, `5f8ab99`_) 📖 Documentation ---------------- * Fix ``version_toml`` example for Poetry (`PR#318`_, `39acb68`_) .. _39acb68: https://github.com/python-semantic-release/python-semantic-release/commit/39acb68bfffe8242040e476893639ba26fa0d6b5 .. _5f8ab99: https://github.com/python-semantic-release/python-semantic-release/commit/5f8ab99bf7254508f4b38fcddef2bdde8dd15a4c .. _PR#318: https://github.com/python-semantic-release/python-semantic-release/pull/318 .. _PR#319: https://github.com/python-semantic-release/python-semantic-release/pull/319 .. _changelog-v7.13.1: v7.13.1 (2021-01-26) ==================== 🪲 Bug Fixes ------------ * Use multiline version_pattern match in replace, closes `#306`_ (`PR#315`_, `1a85af4`_) .. _#306: https://github.com/python-semantic-release/python-semantic-release/issues/306 .. _1a85af4: https://github.com/python-semantic-release/python-semantic-release/commit/1a85af434325ce52e11b49895e115f7a936e417e .. _PR#315: https://github.com/python-semantic-release/python-semantic-release/pull/315 .. _changelog-v7.13.0: v7.13.0 (2021-01-26) ==================== ✨ Features ----------- * Support toml files for version declaration, closes `#245`_, `#275`_ (`PR#307`_, `9b62a7e`_) .. _#245: https://github.com/python-semantic-release/python-semantic-release/issues/245 .. _#275: https://github.com/python-semantic-release/python-semantic-release/issues/275 .. _9b62a7e: https://github.com/python-semantic-release/python-semantic-release/commit/9b62a7e377378667e716384684a47cdf392093fa .. _PR#307: https://github.com/python-semantic-release/python-semantic-release/pull/307 .. 
_changelog-v7.12.0: v7.12.0 (2021-01-25) ==================== ✨ Features ----------- * **github**: Retry GitHub API requests on failure (`PR#314`_, `ac241ed`_) 🪲 Bug Fixes ------------ * **github**: Add retries to github API requests (`PR#314`_, `ac241ed`_) 📖 Documentation ---------------- * **actions**: PAT must be passed to checkout step too, closes `#311`_ (`e2d8e47`_) .. _#311: https://github.com/python-semantic-release/python-semantic-release/issues/311 .. _ac241ed: https://github.com/python-semantic-release/python-semantic-release/commit/ac241edf4de39f4fc0ff561a749fa85caaf9e2ae .. _e2d8e47: https://github.com/python-semantic-release/python-semantic-release/commit/e2d8e47d2b02860881381318dcc088e150c0fcde .. _PR#314: https://github.com/python-semantic-release/python-semantic-release/pull/314 .. _changelog-v7.11.0: v7.11.0 (2021-01-08) ==================== ✨ Features ----------- * **print-version**: Add print-version command to output version (`512e3d9`_) 🪲 Bug Fixes ------------ * Add dot to --define option help (`eb4107d`_) * Avoid Unknown bump level 0 message (`8ab624c`_) * **actions**: Fix github actions with new main location (`6666672`_) ⚙️ Build System ---------------- * Add __main__.py magic file (`e93f36a`_) .. _512e3d9: https://github.com/python-semantic-release/python-semantic-release/commit/512e3d92706055bdf8d08b7c82927d3530183079 .. _6666672: https://github.com/python-semantic-release/python-semantic-release/commit/6666672d3d97ab7cdf47badfa3663f1a69c2dbdf .. _8ab624c: https://github.com/python-semantic-release/python-semantic-release/commit/8ab624cf3508b57a9656a0a212bfee59379d6f8b .. _e93f36a: https://github.com/python-semantic-release/python-semantic-release/commit/e93f36a7a10e48afb42c1dc3d860a5e2a07cf353 .. _eb4107d: https://github.com/python-semantic-release/python-semantic-release/commit/eb4107d2efdf8c885c8ae35f48f1b908d1fced32 .. 
_changelog-v7.10.0: v7.10.0 (2021-01-08) ==================== ✨ Features ----------- * **build**: Allow falsy values for build_command to disable build step (`c07a440`_) 📖 Documentation ---------------- * Fix incorrect reference syntax (`42027f0`_) * Rewrite getting started page (`97a9046`_) .. _42027f0: https://github.com/python-semantic-release/python-semantic-release/commit/42027f0d2bb64f4c9eaec65112bf7b6f67568e60 .. _97a9046: https://github.com/python-semantic-release/python-semantic-release/commit/97a90463872502d1207890ae1d9dd008b1834385 .. _c07a440: https://github.com/python-semantic-release/python-semantic-release/commit/c07a440f2dfc45a2ad8f7c454aaac180c4651f70 .. _changelog-v7.9.0: v7.9.0 (2020-12-21) =================== ✨ Features ----------- * **hvcs**: Add hvcs_domain config option, closes `#277`_ (`ab3061a`_) 🪲 Bug Fixes ------------ * **history**: Coerce version to string (`PR#298`_, `d4cdc3d`_) * **history**: Require semver >= 2.10 (`5087e54`_) .. _#277: https://github.com/python-semantic-release/python-semantic-release/issues/277 .. _5087e54: https://github.com/python-semantic-release/python-semantic-release/commit/5087e549399648cf2e23339a037b33ca8b62d954 .. _ab3061a: https://github.com/python-semantic-release/python-semantic-release/commit/ab3061ae93c49d71afca043b67b361e2eb2919e6 .. _d4cdc3d: https://github.com/python-semantic-release/python-semantic-release/commit/d4cdc3d3cd2d93f2a78f485e3ea107ac816c7d00 .. _PR#298: https://github.com/python-semantic-release/python-semantic-release/pull/298 .. _changelog-v7.8.2: v7.8.2 (2020-12-19) =================== ✨ Features ----------- * **repository**: Add to settings artifact repository (`f4ef373`_) 🪲 Bug Fixes ------------ * **cli**: Skip remove_dist where not needed (`04817d4`_) .. _04817d4: https://github.com/python-semantic-release/python-semantic-release/commit/04817d4ecfc693195e28c80455bfbb127485f36b .. 
_f4ef373: https://github.com/python-semantic-release/python-semantic-release/commit/f4ef3733b948282fba5a832c5c0af134609b26d2 .. _changelog-v7.8.1: v7.8.1 (2020-12-18) =================== 🪲 Bug Fixes ------------ * Filenames with unknown mimetype are now properly uploaded to github release (`f3ece78`_) * **logs**: Fix TypeError when enabling debug logs (`2591a94`_) .. _2591a94: https://github.com/python-semantic-release/python-semantic-release/commit/2591a94115114c4a91a48f5b10b3954f6ac932a1 .. _f3ece78: https://github.com/python-semantic-release/python-semantic-release/commit/f3ece78b2913e70f6b99907b192a1e92bbfd6b77 .. _changelog-v7.8.0: v7.8.0 (2020-12-18) =================== ✨ Features ----------- * Add ``upload_to_pypi_glob_patterns`` option (`42305ed`_) 🪲 Bug Fixes ------------ * **changelog**: Use "issues" link vs "pull" (`93e48c9`_) * **netrc**: Prefer using token defined in GH_TOKEN instead of .netrc file (`3af32a7`_) .. _3af32a7: https://github.com/python-semantic-release/python-semantic-release/commit/3af32a738f2f2841fd75ec961a8f49a0b1c387cf .. _42305ed: https://github.com/python-semantic-release/python-semantic-release/commit/42305ed499ca08c819c4e7e65fcfbae913b8e6e1 .. _93e48c9: https://github.com/python-semantic-release/python-semantic-release/commit/93e48c992cb8b763f430ecbb0b7f9c3ca00036e4 .. _changelog-v7.7.0: v7.7.0 (2020-12-12) =================== ✨ Features ----------- * **changelog**: Add PR links in markdown (`PR#282`_, `0448f6c`_) .. _0448f6c: https://github.com/python-semantic-release/python-semantic-release/commit/0448f6c350bbbf239a81fe13dc5f45761efa7673 .. _PR#282: https://github.com/python-semantic-release/python-semantic-release/pull/282 .. _changelog-v7.6.0: v7.6.0 (2020-12-06) =================== ✨ Features ----------- * Add ``major_on_zero`` option (`d324154`_) 📖 Documentation ---------------- * Add documentation for option ``major_on_zero`` (`2e8b26e`_) .. 
_2e8b26e: https://github.com/python-semantic-release/python-semantic-release/commit/2e8b26e4ee0316a2cf2a93c09c783024fcd6b3ba .. _d324154: https://github.com/python-semantic-release/python-semantic-release/commit/d3241540e7640af911eb24c71e66468feebb0d46 .. _changelog-v7.5.0: v7.5.0 (2020-12-04) =================== ✨ Features ----------- * **logs**: Include scope in changelogs (`PR#281`_, `21c96b6`_) .. _21c96b6: https://github.com/python-semantic-release/python-semantic-release/commit/21c96b688cc44cc6f45af962ffe6d1f759783f37 .. _PR#281: https://github.com/python-semantic-release/python-semantic-release/pull/281 .. _changelog-v7.4.1: v7.4.1 (2020-12-04) =================== 🪲 Bug Fixes ------------ * Add "changelog_capitalize" to flags, closes `#278`_ (`PR#279`_, `37716df`_) .. _#278: https://github.com/python-semantic-release/python-semantic-release/issues/278 .. _37716df: https://github.com/python-semantic-release/python-semantic-release/commit/37716dfa78eb3f848f57a5100d01d93f5aafc0bf .. _PR#279: https://github.com/python-semantic-release/python-semantic-release/pull/279 .. _changelog-v7.4.0: v7.4.0 (2020-11-24) =================== ✨ Features ----------- * Add changelog_capitalize configuration, closes `#260`_ (`7cacca1`_) 📖 Documentation ---------------- * Fix broken internal references (`PR#270`_, `da20b9b`_) * Update links to Github docs (`PR#268`_, `c53162e`_) .. _#260: https://github.com/python-semantic-release/python-semantic-release/issues/260 .. _7cacca1: https://github.com/python-semantic-release/python-semantic-release/commit/7cacca1eb436a7166ba8faf643b53c42bc32a6a7 .. _c53162e: https://github.com/python-semantic-release/python-semantic-release/commit/c53162e366304082a3bd5d143b0401da6a16a263 .. _da20b9b: https://github.com/python-semantic-release/python-semantic-release/commit/da20b9bdd3c7c87809c25ccb2a5993a7ea209a22 .. _PR#268: https://github.com/python-semantic-release/python-semantic-release/pull/268 .. 
_PR#270: https://github.com/python-semantic-release/python-semantic-release/pull/270 .. _changelog-v7.3.0: v7.3.0 (2020-09-28) =================== ✨ Features ----------- * Generate ``changelog.md`` file (`PR#266`_, `2587dfe`_) 📖 Documentation ---------------- * Fix docstring (`5a5e2cf`_) .. _2587dfe: https://github.com/python-semantic-release/python-semantic-release/commit/2587dfed71338ec6c816f58cdf0882382c533598 .. _5a5e2cf: https://github.com/python-semantic-release/python-semantic-release/commit/5a5e2cfb5e6653fb2e95e6e23e56559953b2c2b4 .. _PR#266: https://github.com/python-semantic-release/python-semantic-release/pull/266 .. _changelog-v7.2.5: v7.2.5 (2020-09-16) =================== 🪲 Bug Fixes ------------ * Add required to inputs in action metadata (`PR#264`_, `e76b255`_) .. _e76b255: https://github.com/python-semantic-release/python-semantic-release/commit/e76b255cf7d3d156e3314fc28c54d63fa126e973 .. _PR#264: https://github.com/python-semantic-release/python-semantic-release/pull/264 .. _changelog-v7.2.4: v7.2.4 (2020-09-14) =================== 🪲 Bug Fixes ------------ * Use range for toml dependency, closes `#241`_ (`45707e1`_) .. _#241: https://github.com/python-semantic-release/python-semantic-release/issues/241 .. _45707e1: https://github.com/python-semantic-release/python-semantic-release/commit/45707e1b7dcab48103a33de9d7f9fdb5a34dae4a .. _changelog-v7.2.3: v7.2.3 (2020-09-12) =================== 🪲 Bug Fixes ------------ * Support multiline version_pattern matching by default (`82f7849`_) 📖 Documentation ---------------- * Create 'getting started' instructions (`PR#256`_, `5f4d000`_) * Link to getting started guide in README (`f490e01`_) .. _5f4d000: https://github.com/python-semantic-release/python-semantic-release/commit/5f4d000c3f153d1d23128acf577e389ae879466e .. _82f7849: https://github.com/python-semantic-release/python-semantic-release/commit/82f7849dcf29ba658e0cb3b5d21369af8bf3c16f .. 
_f490e01: https://github.com/python-semantic-release/python-semantic-release/commit/f490e0194fa818db4d38c185bc5e6245bfde546b .. _PR#256: https://github.com/python-semantic-release/python-semantic-release/pull/256 .. _changelog-v7.2.2: v7.2.2 (2020-07-26) =================== 🪲 Bug Fixes ------------ * **changelog**: Send changelog to stdout, closes `#250`_ (`87e2bb8`_) 📖 Documentation ---------------- * Add quotation marks to the pip commands in CONTRIBUTING.rst (`PR#253`_, `e20fa43`_) .. _#250: https://github.com/python-semantic-release/python-semantic-release/issues/250 .. _87e2bb8: https://github.com/python-semantic-release/python-semantic-release/commit/87e2bb881387ff3ac245ab9923347a5a616e197b .. _e20fa43: https://github.com/python-semantic-release/python-semantic-release/commit/e20fa43098c06f5f585c81b9cd7e287dcce3fb5d .. _PR#253: https://github.com/python-semantic-release/python-semantic-release/pull/253 .. _changelog-v7.2.1: v7.2.1 (2020-06-29) =================== 🪲 Bug Fixes ------------ * Commit all files with bumped versions (`PR#249`_, `b3a1766`_) 📖 Documentation ---------------- * Give example of multiple build commands (`PR#248`_, `65f1ffc`_) .. _65f1ffc: https://github.com/python-semantic-release/python-semantic-release/commit/65f1ffcc6cac3bf382f4b821ff2be59d04f9f867 .. _b3a1766: https://github.com/python-semantic-release/python-semantic-release/commit/b3a1766be7edb7d2eb76f2726d35ab8298688b3b .. _PR#248: https://github.com/python-semantic-release/python-semantic-release/pull/248 .. _PR#249: https://github.com/python-semantic-release/python-semantic-release/pull/249 .. _changelog-v7.2.0: v7.2.0 (2020-06-15) =================== ✨ Features ----------- * Bump versions in multiple files, closes `#175`_ (`PR#246`_, `0ba2c47`_) .. _#175: https://github.com/python-semantic-release/python-semantic-release/issues/175 .. _0ba2c47: https://github.com/python-semantic-release/python-semantic-release/commit/0ba2c473c6e44cc326b3299b6ea3ddde833bdb37 .. 
_PR#246: https://github.com/python-semantic-release/python-semantic-release/pull/246 .. _changelog-v7.1.1: v7.1.1 (2020-05-28) =================== 🪲 Bug Fixes ------------ * **changelog**: Swap sha and message in table changelog (`6741370`_) .. _6741370: https://github.com/python-semantic-release/python-semantic-release/commit/6741370ab09b1706ff6e19b9fbe57b4bddefc70d .. _changelog-v7.1.0: v7.1.0 (2020-05-24) =================== ✨ Features ----------- * **changelog**: Add changelog_table component, closes `#237`_ (`PR#242`_, `fe6a7e7`_) .. _#237: https://github.com/python-semantic-release/python-semantic-release/issues/237 .. _fe6a7e7: https://github.com/python-semantic-release/python-semantic-release/commit/fe6a7e7fa014ffb827a1430dbcc10d1fc84c886b .. _PR#242: https://github.com/python-semantic-release/python-semantic-release/pull/242 .. _changelog-v7.0.0: v7.0.0 (2020-05-22) =================== ✨ Features ----------- * Pass changelog_sections to components (`PR#240`_, `3e17a98`_) * **changelog**: Add changelog components (`PR#240`_, `3e17a98`_) 📖 Documentation ---------------- * Add conda-forge badge (`e9536bb`_) * Add documentation for changelog_components (`PR#240`_, `3e17a98`_) 💥 BREAKING CHANGES ------------------- * **changelog**: The ``compare_url`` option has been removed in favor of using ``changelog_components``. This functionality is now available as the ``semantic_release.changelog.compare_url`` component. .. _3e17a98: https://github.com/python-semantic-release/python-semantic-release/commit/3e17a98d7fa8468868a87e62651ac2c010067711 .. _e9536bb: https://github.com/python-semantic-release/python-semantic-release/commit/e9536bbe119c9e3b90c61130c02468e0e1f14141 .. _PR#240: https://github.com/python-semantic-release/python-semantic-release/pull/240 .. _changelog-v6.4.1: v6.4.1 (2020-05-15) =================== 🪲 Bug Fixes ------------ * Convert ``\r\n`` to ``\n`` in commit messages, closes `#239`_ (`34acbbc`_) .. 
_#239: https://github.com/python-semantic-release/python-semantic-release/issues/239 .. _34acbbc: https://github.com/python-semantic-release/python-semantic-release/commit/34acbbcd25320a9d18dcd1a4f43e1ce1837b2c9f .. _changelog-v6.4.0: v6.4.0 (2020-05-15) =================== ✨ Features ----------- * **history**: Create emoji parser (`PR#238`_, `2e1c50a`_) 🪲 Bug Fixes ------------ * Add emojis to default changelog_sections (`PR#238`_, `2e1c50a`_) * Include all parsed types in changelog (`PR#238`_, `2e1c50a`_) 📖 Documentation ---------------- * Add documentation for emoji parser (`PR#238`_, `2e1c50a`_) ♻️ Refactoring --------------- * **history**: Get breaking changes in parser (`PR#238`_, `2e1c50a`_) .. _2e1c50a: https://github.com/python-semantic-release/python-semantic-release/commit/2e1c50a865628b372f48945a039a3edb38a7cdf0 .. _PR#238: https://github.com/python-semantic-release/python-semantic-release/pull/238 .. _changelog-v6.3.1: v6.3.1 (2020-05-11) =================== 🪲 Bug Fixes ------------ * Use getboolean for commit_version_number, closes `#186`_ (`a60e0b4`_) .. _#186: https://github.com/python-semantic-release/python-semantic-release/issues/186 .. _a60e0b4: https://github.com/python-semantic-release/python-semantic-release/commit/a60e0b4e3cadf310c3e0ad67ebeb4e69d0ee50cb .. _changelog-v6.3.0: v6.3.0 (2020-05-09) =================== ✨ Features ----------- * **history**: Support linking compare page in changelog, closes `#218`_ (`79a8e02`_) 📖 Documentation ---------------- * Document compare_link option (`e52c355`_) * Rewrite commit-log-parsing.rst (`4c70f4f`_) .. _#218: https://github.com/python-semantic-release/python-semantic-release/issues/218 .. _4c70f4f: https://github.com/python-semantic-release/python-semantic-release/commit/4c70f4f2aa3343c966d1b7ab8566fcc782242ab9 .. _79a8e02: https://github.com/python-semantic-release/python-semantic-release/commit/79a8e02df82fbc2acecaad9e9ff7368e61df3e54 .. 
_e52c355: https://github.com/python-semantic-release/python-semantic-release/commit/e52c355c0d742ddd2cfa65d42888296942e5bec5 .. _changelog-v6.2.0: v6.2.0 (2020-05-02) =================== ✨ Features ----------- * **history**: Check all paragraphs for breaking changes, closes `#200`_ (`fec08f0`_) 📖 Documentation ---------------- * Add = to verbosity option, closes `#227`_ (`a0f4c9c`_) * Use references where possible, closes `#221`_ (`f38e5d4`_) .. _#200: https://github.com/python-semantic-release/python-semantic-release/issues/200 .. _#221: https://github.com/python-semantic-release/python-semantic-release/issues/221 .. _#227: https://github.com/python-semantic-release/python-semantic-release/issues/227 .. _a0f4c9c: https://github.com/python-semantic-release/python-semantic-release/commit/a0f4c9cd397fcb98f880097319c08160adb3c3e6 .. _f38e5d4: https://github.com/python-semantic-release/python-semantic-release/commit/f38e5d4a1597cddb69ce47a4d79b8774e796bf41 .. _fec08f0: https://github.com/python-semantic-release/python-semantic-release/commit/fec08f0dbd7ae15f95ca9c41a02c9fe6d448ede0 .. _changelog-v6.1.0: v6.1.0 (2020-04-26) =================== ✨ Features ----------- * **actions**: Support PYPI_TOKEN on GitHub Actions (`df2c080`_) * **pypi**: Support easier use of API tokens, closes `#213`_ (`bac135c`_) 📖 Documentation ---------------- * Add documentation for PYPI_TOKEN (`a8263a0`_) .. _#213: https://github.com/python-semantic-release/python-semantic-release/issues/213 .. _a8263a0: https://github.com/python-semantic-release/python-semantic-release/commit/a8263a066177d1d42f2844e4cb42a76a23588500 .. _bac135c: https://github.com/python-semantic-release/python-semantic-release/commit/bac135c0ae7a6053ecfc7cdf2942c3c89640debf .. _df2c080: https://github.com/python-semantic-release/python-semantic-release/commit/df2c0806f0a92186e914cfc8cc992171d74422df .. 
_changelog-v6.0.1: v6.0.1 (2020-04-15) =================== 🪲 Bug Fixes ------------ * **hvcs**: Convert get_hvcs to use LoggedFunction (`3084249`_) .. _3084249: https://github.com/python-semantic-release/python-semantic-release/commit/308424933fd3375ca3730d9eaf8abbad2435830b .. _changelog-v6.0.0: v6.0.0 (2020-04-15) =================== 📖 Documentation ---------------- * Create Read the Docs config file (`aa5a1b7`_) * Include README.rst in index.rst (`8673a9d`_) * Move action.rst into main documentation (`509ccaf`_) * Rewrite README.rst (`e049772`_) * Rewrite troubleshooting page (`0285de2`_) ♻️ Refactoring --------------- * **debug**: Use logging and click_log instead of ndebug (`15b1f65`_) 💥 BREAKING CHANGES ------------------- * **debug**: ``debug="*"`` no longer has an effect, instead use ``--verbosity DEBUG``. .. _0285de2: https://github.com/python-semantic-release/python-semantic-release/commit/0285de215a8dac3fcc9a51f555fa45d476a56dff .. _15b1f65: https://github.com/python-semantic-release/python-semantic-release/commit/15b1f650f29761e1ab2a91b767cbff79b2057a4c .. _509ccaf: https://github.com/python-semantic-release/python-semantic-release/commit/509ccaf307a0998eced69ad9fee1807132babe28 .. _8673a9d: https://github.com/python-semantic-release/python-semantic-release/commit/8673a9d92a9bf348bb3409e002a830741396c8ca .. _aa5a1b7: https://github.com/python-semantic-release/python-semantic-release/commit/aa5a1b700a1c461c81c6434686cb6f0504c4bece .. _e049772: https://github.com/python-semantic-release/python-semantic-release/commit/e049772cf14cdd49538cf357db467f0bf3fe9587 .. _changelog-v5.2.0: v5.2.0 (2020-04-09) =================== ✨ Features ----------- * **github**: Add tag as default release name (`2997908`_) 📖 Documentation ---------------- * Automate API docs (`7d4fea2`_) .. _2997908: https://github.com/python-semantic-release/python-semantic-release/commit/2997908f80f4fcec56917d237a079b961a06f990 .. 
_7d4fea2: https://github.com/python-semantic-release/python-semantic-release/commit/7d4fea266cc75007de51609131eb6d1e324da608 .. _changelog-v5.1.0: v5.1.0 (2020-04-04) =================== ✨ Features ----------- * **history**: Allow customizing changelog_sections (`PR#207`_, `d5803d5`_) 📖 Documentation ---------------- * Improve formatting of configuration page (`9a8e22e`_) * Improve formatting of envvars page (`b376a56`_) * Update index.rst (`b27c26c`_) .. _9a8e22e: https://github.com/python-semantic-release/python-semantic-release/commit/9a8e22e838d7dbf3bfd941397c3b39560aca6451 .. _b27c26c: https://github.com/python-semantic-release/python-semantic-release/commit/b27c26c66e7e41843ab29076f7e724908091b46e .. _b376a56: https://github.com/python-semantic-release/python-semantic-release/commit/b376a567bfd407a507ce0752614b0ca75a0f2973 .. _d5803d5: https://github.com/python-semantic-release/python-semantic-release/commit/d5803d5c1668d86482a31ac0853bac7ecfdc63bc .. _PR#207: https://github.com/python-semantic-release/python-semantic-release/pull/207 .. _changelog-v5.0.3: v5.0.3 (2020-03-26) =================== 🪲 Bug Fixes ------------ * Bump dependencies and fix Windows issues on Development (`PR#173`_, `0a6f8c3`_) * Missing mime types on Windows (`PR#173`_, `0a6f8c3`_) .. _0a6f8c3: https://github.com/python-semantic-release/python-semantic-release/commit/0a6f8c3842b05f5f424dad5ce1fa5e3823c7e688 .. _PR#173: https://github.com/python-semantic-release/python-semantic-release/pull/173 .. _changelog-v5.0.2: v5.0.2 (2020-03-22) =================== 🪲 Bug Fixes ------------ * **history**: Leave case of other characters unchanged (`96ba94c`_) .. _96ba94c: https://github.com/python-semantic-release/python-semantic-release/commit/96ba94c4b4593997343ec61ecb6c823c1494d0e2 .. _changelog-v5.0.1: v5.0.1 (2020-03-22) =================== 🪲 Bug Fixes ------------ * Make action use current version of semantic-release (`123984d`_) .. 
_123984d: https://github.com/python-semantic-release/python-semantic-release/commit/123984d735181c622f3d99088a1ad91321192a11 .. _changelog-v5.0.0: v5.0.0 (2020-03-22) =================== ✨ Features ----------- * **build**: Allow config setting for build command, closes `#188`_ (`PR#195`_, `740f4bd`_) 🪲 Bug Fixes ------------ * Rename default of build_command config (`d5db22f`_) 📖 Documentation ---------------- * **pypi**: Update docstrings in pypi.py (`6502d44`_) 💥 BREAKING CHANGES ------------------- * **build**: Previously the build_commands configuration variable set the types of bundles sent to ``python setup.py``. It has been replaced by the configuration variable ``build_command`` which takes the full command e.g. ``python setup.py sdist`` or ``poetry build``. .. _#188: https://github.com/python-semantic-release/python-semantic-release/issues/188 .. _6502d44: https://github.com/python-semantic-release/python-semantic-release/commit/6502d448fa65e5dc100e32595e83fff6f62a881a .. _740f4bd: https://github.com/python-semantic-release/python-semantic-release/commit/740f4bdb26569362acfc80f7e862fc2c750a46dd .. _d5db22f: https://github.com/python-semantic-release/python-semantic-release/commit/d5db22f9f7acd05d20fd60a8b4b5a35d4bbfabb8 .. _PR#195: https://github.com/python-semantic-release/python-semantic-release/pull/195 .. _changelog-v4.11.0: v4.11.0 (2020-03-22) ==================== ✨ Features ----------- * **actions**: Create GitHub Action (`350245d`_) 📖 Documentation ---------------- * Make AUTHORS.rst dynamic (`db2e076`_) * **readme**: Fix minor typo (`c22f69f`_) .. _350245d: https://github.com/python-semantic-release/python-semantic-release/commit/350245dbfb07ed6a1db017b1d9d1072b368b1497 .. _c22f69f: https://github.com/python-semantic-release/python-semantic-release/commit/c22f69f62a215ff65e1ab6dcaa8e7e9662692e64 .. _db2e076: https://github.com/python-semantic-release/python-semantic-release/commit/db2e0762f3189d0f1a6ba29aad32bdefb7e0187f .. 
_changelog-v4.10.0: v4.10.0 (2020-03-03) ==================== ✨ Features ----------- * Make commit message configurable (`PR#184`_, `eb0762c`_) .. _eb0762c: https://github.com/python-semantic-release/python-semantic-release/commit/eb0762ca9fea5cecd5c7b182504912a629be473b .. _PR#184: https://github.com/python-semantic-release/python-semantic-release/pull/184 .. _changelog-v4.9.0: v4.9.0 (2020-03-02) =================== ✨ Features ----------- * **pypi**: Add build_commands config (`22146ea`_) 🪲 Bug Fixes ------------ * **pypi**: Change bdist_wheels to bdist_wheel (`c4db509`_) .. _22146ea: https://github.com/python-semantic-release/python-semantic-release/commit/22146ea4b94466a90d60b94db4cc65f46da19197 .. _c4db509: https://github.com/python-semantic-release/python-semantic-release/commit/c4db50926c03f3d551c8331932c567c7bdaf4f3d .. _changelog-v4.8.0: v4.8.0 (2020-02-28) =================== ✨ Features ----------- * **git**: Add a new config for commit author (`aa2c22c`_) .. _aa2c22c: https://github.com/python-semantic-release/python-semantic-release/commit/aa2c22c469448fe57f02bea67a02f998ce519ac3 .. _changelog-v4.7.1: v4.7.1 (2020-02-28) =================== 🪲 Bug Fixes ------------ * Repair parsing of remotes in the gitlab ci format, closes `#181`_ (`0fddbe2`_) .. _#181: https://github.com/python-semantic-release/python-semantic-release/issues/181 .. _0fddbe2: https://github.com/python-semantic-release/python-semantic-release/commit/0fddbe2fb70d24c09ceddb789a159162a45942dc .. 
_changelog-v4.7.0: v4.7.0 (2020-02-28) =================== ✨ Features ----------- * Upload distribution files to GitHub Releases (`PR#177`_, `e427658`_) * **github**: Upload dists to release (`PR#177`_, `e427658`_) 🪲 Bug Fixes ------------ * Post changelog after PyPI upload (`PR#177`_, `e427658`_) * Support repository owner names containing dots, closes `#179`_ (`a6c4da4`_) * **github**: Fix upload of .whl files (`PR#177`_, `e427658`_) * **github**: Use application/octet-stream for .whl files (`90a7e47`_) 📖 Documentation ---------------- * Document upload_to_release config option (`PR#177`_, `e427658`_) .. _#179: https://github.com/python-semantic-release/python-semantic-release/issues/179 .. _90a7e47: https://github.com/python-semantic-release/python-semantic-release/commit/90a7e476a04d26babc88002e9035cad2ed485b07 .. _a6c4da4: https://github.com/python-semantic-release/python-semantic-release/commit/a6c4da4c0e6bd8a37f64544f7813fa027f5054ed .. _e427658: https://github.com/python-semantic-release/python-semantic-release/commit/e427658e33abf518191498c3142a0f18d3150e07 .. _PR#177: https://github.com/python-semantic-release/python-semantic-release/pull/177 .. _changelog-v4.6.0: v4.6.0 (2020-02-19) =================== ✨ Features ----------- * **history**: Capitalize changelog messages (`1a8e306`_) 🪲 Bug Fixes ------------ * Add more debug statements in logs (`bc931ec`_) * Only overwrite with patch if bump is None, closes `#159`_ (`1daa4e2`_) .. _#159: https://github.com/python-semantic-release/python-semantic-release/issues/159 .. _1a8e306: https://github.com/python-semantic-release/python-semantic-release/commit/1a8e3060b8f6d6362c27903dcfc69d17db5f1d36 .. _1daa4e2: https://github.com/python-semantic-release/python-semantic-release/commit/1daa4e23ec2dd40c6b490849276524264787e24e .. _bc931ec: https://github.com/python-semantic-release/python-semantic-release/commit/bc931ec46795fde4c1ccee004eec83bf73d5de7a .. 
_changelog-v4.5.1: v4.5.1 (2020-02-16) =================== 🪲 Bug Fixes ------------ * **github**: Send token in request header, closes `#167`_ (`be9972a`_) 📖 Documentation ---------------- * Add note about automatic releases in readme (`e606e75`_) * Fix broken list in readme (`7aa572b`_) * Update readme and getting started docs (`07b3208`_) .. _#167: https://github.com/python-semantic-release/python-semantic-release/issues/167 .. _07b3208: https://github.com/python-semantic-release/python-semantic-release/commit/07b3208ff64301e544c4fdcb48314e49078fc479 .. _7aa572b: https://github.com/python-semantic-release/python-semantic-release/commit/7aa572b2a323ddbc69686309226395f40c52b469 .. _be9972a: https://github.com/python-semantic-release/python-semantic-release/commit/be9972a7b1fb183f738fb31bd370adb30281e4d5 .. _e606e75: https://github.com/python-semantic-release/python-semantic-release/commit/e606e7583a30167cf7679c6bcada2f9e768b3abe .. _changelog-v4.5.0: v4.5.0 (2020-02-08) =================== ✨ Features ----------- * **history**: Enable colon defined version, closes `#165`_ (`7837f50`_) 🪲 Bug Fixes ------------ * Remove erroneous submodule (`762bfda`_) * **cli**: --noop flag works when placed before the command, closes `#73`_ (`4fcc781`_) .. _#73: https://github.com/python-semantic-release/python-semantic-release/issues/73 .. _#165: https://github.com/python-semantic-release/python-semantic-release/issues/165 .. _4fcc781: https://github.com/python-semantic-release/python-semantic-release/commit/4fcc781d1a3f9235db552f0f4431c9f5e638d298 .. _762bfda: https://github.com/python-semantic-release/python-semantic-release/commit/762bfda728c266b8cd14671d8da9298fc99c63fb .. _7837f50: https://github.com/python-semantic-release/python-semantic-release/commit/7837f5036269328ef29996b9ea63cccd5a6bc2d5 .. _changelog-v4.4.1: v4.4.1 (2020-01-18) =================== 🪲 Bug Fixes ------------ * Add quotes around twine arguments, closes `#163`_ (`46a83a9`_) .. 
_#163: https://github.com/python-semantic-release/python-semantic-release/issues/163 .. _46a83a9: https://github.com/python-semantic-release/python-semantic-release/commit/46a83a94b17c09d8f686c3ae7b199d7fd0e0e5e5 .. _changelog-v4.4.0: v4.4.0 (2020-01-17) =================== ✨ Features ----------- * **parser**: Add support for exclamation point for breaking changes, closes `#156`_ (`a4f8a10`_) * **parser**: Make BREAKING-CHANGE synonymous with BREAKING CHANGE (`beedccf`_) 🪲 Bug Fixes ------------ * **github**: Add check for GITHUB_ACTOR for git push (`PR#162`_, `c41e9bb`_) .. _#156: https://github.com/python-semantic-release/python-semantic-release/issues/156 .. _a4f8a10: https://github.com/python-semantic-release/python-semantic-release/commit/a4f8a10afcc358a8fbef83be2041129480350be2 .. _beedccf: https://github.com/python-semantic-release/python-semantic-release/commit/beedccfddfb360aeebef595342ee980446012ec7 .. _c41e9bb: https://github.com/python-semantic-release/python-semantic-release/commit/c41e9bb986d01b92d58419cbdc88489d630a11f1 .. _PR#162: https://github.com/python-semantic-release/python-semantic-release/pull/162 .. _changelog-v4.3.4: v4.3.4 (2019-12-17) =================== 🪲 Bug Fixes ------------ * Fallback to whole log if correct tag is not available, closes `#51`_ (`PR#157`_, `252bffd`_) .. _#51: https://github.com/python-semantic-release/python-semantic-release/issues/51 .. _252bffd: https://github.com/python-semantic-release/python-semantic-release/commit/252bffd3be7b6dfcfdb384d24cb1cd83d990fc9a .. _PR#157: https://github.com/python-semantic-release/python-semantic-release/pull/157 .. _changelog-v4.3.3: v4.3.3 (2019-11-06) =================== 🪲 Bug Fixes ------------ * Instead of requiring click 7.0, looks like all tests will pass with at least 2.0. (`PR#155`_, `f07c7f6`_) * Set version of click to >=2.0,<8.0. (`PR#155`_, `f07c7f6`_) * Upgrade to click 7.0, closes `#117`_ (`PR#155`_, `f07c7f6`_) .. 
_#117: https://github.com/python-semantic-release/python-semantic-release/issues/117 .. _f07c7f6: https://github.com/python-semantic-release/python-semantic-release/commit/f07c7f653be1c018e443f071d9a196d9293e9521 .. _PR#155: https://github.com/python-semantic-release/python-semantic-release/pull/155 .. _changelog-v4.3.2: v4.3.2 (2019-10-05) =================== 🪲 Bug Fixes ------------ * Update regex to get repository owner and name for project with dots, closes `#151`_ (`2778e31`_) .. _#151: https://github.com/python-semantic-release/python-semantic-release/issues/151 .. _2778e31: https://github.com/python-semantic-release/python-semantic-release/commit/2778e316a0c0aa931b1012cb3862d04659c05e73 .. _changelog-v4.3.1: v4.3.1 (2019-09-29) =================== 🪲 Bug Fixes ------------ * Support repo urls without git terminator (`700e9f1`_) .. _700e9f1: https://github.com/python-semantic-release/python-semantic-release/commit/700e9f18dafde1833f482272a72bb80b54d56bb3 .. _changelog-v4.3.0: v4.3.0 (2019-09-06) =================== ✨ Features ----------- * Add the possibility to load configuration from pyproject.toml (`35f8bfe`_) * Allow the override of configuration options from cli, closes `#119`_ (`f0ac82f`_) * Allow users to get version from tag and write/commit bump to file, closes `#104`_ (`1f9fe1c`_) * Make the vcs functionalities work with gitlab, closes `#121`_ (`82d555d`_) 🪲 Bug Fixes ------------ * Manage subgroups in git remote url, closes `#139`_, `#140`_ (`4b11875`_) * Update list of commit types to include build, ci and perf, closes `#145`_ (`41ea12f`_) .. _#104: https://github.com/python-semantic-release/python-semantic-release/issues/104 .. _#119: https://github.com/python-semantic-release/python-semantic-release/issues/119 .. _#121: https://github.com/python-semantic-release/python-semantic-release/issues/121 .. _#139: https://github.com/python-semantic-release/python-semantic-release/issues/139 .. 
_#140: https://github.com/python-semantic-release/python-semantic-release/issues/140 .. _#145: https://github.com/python-semantic-release/python-semantic-release/issues/145 .. _1f9fe1c: https://github.com/python-semantic-release/python-semantic-release/commit/1f9fe1cc7666d47cc0c348c4705b63c39bf10ecc .. _35f8bfe: https://github.com/python-semantic-release/python-semantic-release/commit/35f8bfef443c8b69560c918f4b13bc766fb3daa2 .. _41ea12f: https://github.com/python-semantic-release/python-semantic-release/commit/41ea12fa91f97c0046178806bce3be57c3bc2308 .. _4b11875: https://github.com/python-semantic-release/python-semantic-release/commit/4b118754729094e330389712cf863e1c6cefee69 .. _82d555d: https://github.com/python-semantic-release/python-semantic-release/commit/82d555d45b9d9e295ef3f9546a6ca2a38ca4522e .. _f0ac82f: https://github.com/python-semantic-release/python-semantic-release/commit/f0ac82fe59eb59a768a73a1bf2ea934b9d448c58 .. _changelog-v4.2.0: v4.2.0 (2019-08-05) =================== ✨ Features ----------- * Add configuration to customize handling of dists, closes `#115`_ (`2af6f41`_) * Add support for configuring branch, closes `#43`_ (`14abb05`_) * Add support for showing unreleased changelog, closes `#134`_ (`41ef794`_) 🪲 Bug Fixes ------------ * Add commit hash when generating breaking changes, closes `#120`_ (`0c74faf`_) * Kept setting new version for tag source (`0e24a56`_) * Remove deletion of build folder, closes `#115`_ (`b45703d`_) * Updated the tag tests (`3303eef`_) * Upgrade click to 7.0 (`2c5dd80`_) .. _#43: https://github.com/python-semantic-release/python-semantic-release/issues/43 .. _#115: https://github.com/python-semantic-release/python-semantic-release/issues/115 .. _#120: https://github.com/python-semantic-release/python-semantic-release/issues/120 .. _#134: https://github.com/python-semantic-release/python-semantic-release/issues/134 .. 
_0c74faf: https://github.com/python-semantic-release/python-semantic-release/commit/0c74fafdfa81cf2e13db8f4dcf0a6f7347552504 .. _0e24a56: https://github.com/python-semantic-release/python-semantic-release/commit/0e24a5633f8f94b48da97b011634d4f9d84f7b4b .. _14abb05: https://github.com/python-semantic-release/python-semantic-release/commit/14abb05e7f878e88002f896812d66b4ea5c219d4 .. _2af6f41: https://github.com/python-semantic-release/python-semantic-release/commit/2af6f41b21205bdd192514a434fca2feba17725a .. _2c5dd80: https://github.com/python-semantic-release/python-semantic-release/commit/2c5dd809b84c2157a5e6cdcc773c43ec864f0328 .. _3303eef: https://github.com/python-semantic-release/python-semantic-release/commit/3303eefa49a0474bbd85df10ae186ccbf9090ec1 .. _41ef794: https://github.com/python-semantic-release/python-semantic-release/commit/41ef7947ad8a07392c96c7540980476e989c1d83 .. _b45703d: https://github.com/python-semantic-release/python-semantic-release/commit/b45703dad38c29b28575060b21e5fb0f8482c6b1 .. _changelog-v4.1.2: v4.1.2 (2019-08-04) =================== 🪲 Bug Fixes ------------ * Correct isort build fail (`0037210`_) * Make sure the history only breaks loop for version commit, closes `#135`_ (`5dc6cfc`_) * **vcs**: Allow cli to be run from subdirectory (`fb7bb14`_) 📖 Documentation ---------------- * **circleci**: Point badge to master branch (`9c7302e`_) .. _#135: https://github.com/python-semantic-release/python-semantic-release/issues/135 .. _0037210: https://github.com/python-semantic-release/python-semantic-release/commit/00372100b527ff9308d9e43fe5c65cdf179dc4dc .. _5dc6cfc: https://github.com/python-semantic-release/python-semantic-release/commit/5dc6cfc634254f09997bb3cb0f17abd296e2c01f .. _9c7302e: https://github.com/python-semantic-release/python-semantic-release/commit/9c7302e184a1bd88f39b3039691b55cd77f0bb07 .. _fb7bb14: https://github.com/python-semantic-release/python-semantic-release/commit/fb7bb14300e483626464795b8ff4f033a194cf6f .. 
_changelog-v4.1.1: v4.1.1 (2019-02-15) =================== 📖 Documentation ---------------- * Correct usage of changelog (`f4f59b0`_) * Debug usage and related (`f08e594`_) * Describing the commands (`b6fa04d`_) * Update url for commit guidelines. The guidelines can now be found in the DEVELOPERS.md in angular. (`90c1b21`_) .. _90c1b21: https://github.com/python-semantic-release/python-semantic-release/commit/90c1b217f86263301b91d19d641c7b348e37d960 .. _b6fa04d: https://github.com/python-semantic-release/python-semantic-release/commit/b6fa04db3044525a1ee1b5952fb175a706842238 .. _f08e594: https://github.com/python-semantic-release/python-semantic-release/commit/f08e5943a9876f2d17a7c02f468720995c7d9ffd .. _f4f59b0: https://github.com/python-semantic-release/python-semantic-release/commit/f4f59b08c73700c6ee04930221bfcb1355cbc48d .. _changelog-v4.1.0: v4.1.0 (2019-01-31) =================== ✨ Features ----------- * **ci_checks**: Add support for bitbucket (`9fc120d`_) 🪲 Bug Fixes ------------ * Initialize git Repo from current folder (`c7415e6`_) * Maintain version variable formatting on bump (`PR#103`_, `bf63156`_) * Use same changelog code for command as post (`248f622`_) 📖 Documentation ---------------- * Add installation instructions for development (`PR#106`_, `9168d0e`_) * **readme**: Add testing instructions (`bb352f5`_) .. _248f622: https://github.com/python-semantic-release/python-semantic-release/commit/248f62283c59182868c43ff105a66d85c923a894 .. _9168d0e: https://github.com/python-semantic-release/python-semantic-release/commit/9168d0ea56734319a5d77e890f23ff6ba51cc97d .. _9fc120d: https://github.com/python-semantic-release/python-semantic-release/commit/9fc120d1a7e4acbbca609628e72651685108b364 .. _bb352f5: https://github.com/python-semantic-release/python-semantic-release/commit/bb352f5b6616cc42c9f2f2487c51dedda1c68295 .. _bf63156: https://github.com/python-semantic-release/python-semantic-release/commit/bf63156f60340614fae94c255fb2f097cf317b2b .. 
_c7415e6: https://github.com/python-semantic-release/python-semantic-release/commit/c7415e634c0affbe6396e0aa2bafe7c1b3368914 .. _PR#103: https://github.com/python-semantic-release/python-semantic-release/pull/103 .. _PR#106: https://github.com/python-semantic-release/python-semantic-release/pull/106 .. _changelog-v4.0.1: v4.0.1 (2019-01-12) =================== 🪲 Bug Fixes ------------ * Add better error message when pypi credentials are empty, closes `#96`_ (`c4e5dcb`_) * Clean out dist and build before building, closes `#86`_ (`b628e46`_) * Filter out pypi secrets from exceptions, closes `#41`_ (`5918371`_) * Unfreeze dependencies, closes `#100`_ (`847833b`_) * Use correct syntax to exclude tests in package, closes `#92`_ (`3e41e91`_) * **parser_angular**: Fix non-match when special chars in scope (`8a33123`_) 📖 Documentation ---------------- * Remove reference to gitter, closes `#90`_ (`896e37b`_) .. _#41: https://github.com/python-semantic-release/python-semantic-release/issues/41 .. _#86: https://github.com/python-semantic-release/python-semantic-release/issues/86 .. _#90: https://github.com/python-semantic-release/python-semantic-release/issues/90 .. _#92: https://github.com/python-semantic-release/python-semantic-release/issues/92 .. _#96: https://github.com/python-semantic-release/python-semantic-release/issues/96 .. _#100: https://github.com/python-semantic-release/python-semantic-release/issues/100 .. _3e41e91: https://github.com/python-semantic-release/python-semantic-release/commit/3e41e91c318663085cd28c8165ece21d7e383475 .. _5918371: https://github.com/python-semantic-release/python-semantic-release/commit/5918371c1e82b06606087c9945d8eaf2604a0578 .. _847833b: https://github.com/python-semantic-release/python-semantic-release/commit/847833bf48352a4935f906d0c3f75e1db596ca1c .. _896e37b: https://github.com/python-semantic-release/python-semantic-release/commit/896e37b95cc43218e8f593325dd4ea63f8b895d9 .. 
_8a33123: https://github.com/python-semantic-release/python-semantic-release/commit/8a331232621b26767e4268079f9295bf695047ab .. _b628e46: https://github.com/python-semantic-release/python-semantic-release/commit/b628e466f86bc27cbe45ec27a02d4774a0efd3bb .. _c4e5dcb: https://github.com/python-semantic-release/python-semantic-release/commit/c4e5dcbeda0ce8f87d25faefb4d9ae3581029a8f .. _changelog-v4.0.0: v4.0.0 (2018-11-22) =================== ✨ Features ----------- * Add support for commit_message config variable (`4de5400`_) * **CI checks**: Add support for GitLab CI checks, closes `#88`_ (`8df5e2b`_) 🪲 Bug Fixes ------------ * Add check of credentials (`7d945d4`_) * Add credentials check (`0694604`_) * Add dists to twine call (`1cec2df`_) * Change requests from fixed version to version range (`PR#93`_, `af3ad59`_) * Re-add skip-existing (`366e9c1`_) * Remove repository argument in twine (`e24543b`_) * Remove universal from setup config (`18b2402`_) * Update twine (`c4ae7b8`_) * Use new interface for twine (`c04872d`_) * Use twine through cli call (`ab84beb`_) 📖 Documentation ---------------- * Add type hints and more complete docstrings, closes `#81`_ (`a6d5e9b`_) * Fix typo in documentation index (`da6844b`_) ♻️ Refactoring --------------- * Remove support for python 2 (`85fe638`_) 💥 BREAKING CHANGES ------------------- * If you rely on the commit message to be the version number only, this will break your code * This will only work with python 3 after this commit. .. _#81: https://github.com/python-semantic-release/python-semantic-release/issues/81 .. _#88: https://github.com/python-semantic-release/python-semantic-release/issues/88 .. _0694604: https://github.com/python-semantic-release/python-semantic-release/commit/0694604f3b3d2159a4037620605ded09236cdef5 .. _18b2402: https://github.com/python-semantic-release/python-semantic-release/commit/18b24025e397aace03dd5bb9eed46cfdd13491bd .. 
_1cec2df: https://github.com/python-semantic-release/python-semantic-release/commit/1cec2df8bcb7f877c813d6470d454244630b050a .. _366e9c1: https://github.com/python-semantic-release/python-semantic-release/commit/366e9c1d0b9ffcde755407a1de18e8295f6ad3a1 .. _4de5400: https://github.com/python-semantic-release/python-semantic-release/commit/4de540011ab10483ee1865f99c623526cf961bb9 .. _7d945d4: https://github.com/python-semantic-release/python-semantic-release/commit/7d945d44b36b3e8c0b7771570cb2305e9e09d0b2 .. _85fe638: https://github.com/python-semantic-release/python-semantic-release/commit/85fe6384c15db317bc7142f4c8bbf2da58cece58 .. _8df5e2b: https://github.com/python-semantic-release/python-semantic-release/commit/8df5e2bdd33a620e683f3adabe174e94ceaa88d9 .. _a6d5e9b: https://github.com/python-semantic-release/python-semantic-release/commit/a6d5e9b1ccbe75d59e7240528593978a19d8d040 .. _ab84beb: https://github.com/python-semantic-release/python-semantic-release/commit/ab84beb8f809e39ae35cd3ce5c15df698d8712fd .. _af3ad59: https://github.com/python-semantic-release/python-semantic-release/commit/af3ad59f018876e11cc3acdda0b149f8dd5606bd .. _c04872d: https://github.com/python-semantic-release/python-semantic-release/commit/c04872d00a26e9bf0f48eeacb360b37ce0fba01e .. _c4ae7b8: https://github.com/python-semantic-release/python-semantic-release/commit/c4ae7b8ecc682855a8568b247690eaebe62d2d26 .. _da6844b: https://github.com/python-semantic-release/python-semantic-release/commit/da6844bce0070a0020bf13950bd136fe28262602 .. _e24543b: https://github.com/python-semantic-release/python-semantic-release/commit/e24543b96adb208897f4ce3eaab96b2f4df13106 .. _PR#93: https://github.com/python-semantic-release/python-semantic-release/pull/93 .. _changelog-v3.11.2: v3.11.2 (2018-06-10) ==================== 🪲 Bug Fixes ------------ * Upgrade twine (`9722313`_) .. _9722313: https://github.com/python-semantic-release/python-semantic-release/commit/9722313eb63c7e2c32c084ad31bed7ee1c48a928 .. 
_changelog-v3.11.1: v3.11.1 (2018-06-06) ==================== 🪲 Bug Fixes ------------ * Change Gitpython version number, closes `#80`_ (`23c9d4b`_) 📖 Documentation ---------------- * Add retry option to cli docs (`021da50`_) .. _#80: https://github.com/python-semantic-release/python-semantic-release/issues/80 .. _021da50: https://github.com/python-semantic-release/python-semantic-release/commit/021da5001934f3199c98d7cf29f62a3ad8c2e56a .. _23c9d4b: https://github.com/python-semantic-release/python-semantic-release/commit/23c9d4b6a1716e65605ed985881452898d5cf644 .. _changelog-v3.11.0: v3.11.0 (2018-04-12) ==================== ✨ Features ----------- * Add --retry cli option (`PR#78`_, `3e312c0`_) * Add support to finding previous version from tags if not using commit messages (`PR#68`_, `6786487`_) * Be a bit more forgiving to find previous tags (`PR#68`_, `6786487`_) 🪲 Bug Fixes ------------ * Add pytest cache to gitignore (`b8efd5a`_) * Make repo non if it is not a git repository, closes `#74`_ (`1dc306b`_) 📖 Documentation ---------------- * Define ``--retry`` usage (`3e312c0`_) * Remove old notes about trello board (`7f50c52`_) * Update status badges (`cfa13b8`_) .. _#74: https://github.com/python-semantic-release/python-semantic-release/issues/74 .. _1dc306b: https://github.com/python-semantic-release/python-semantic-release/commit/1dc306b9b1db2ac360211bdc61fd815302d0014c .. _3e312c0: https://github.com/python-semantic-release/python-semantic-release/commit/3e312c0ce79a78d25016a3b294b772983cfb5e0f .. _6786487: https://github.com/python-semantic-release/python-semantic-release/commit/6786487ebf4ab481139ef9f43cd74e345debb334 .. _7f50c52: https://github.com/python-semantic-release/python-semantic-release/commit/7f50c521a522bb0c4579332766248778350e205b .. _b8efd5a: https://github.com/python-semantic-release/python-semantic-release/commit/b8efd5a6249c79c8378bffea3e245657e7094ec9 .. 
_cfa13b8: https://github.com/python-semantic-release/python-semantic-release/commit/cfa13b8260e3f3b0bfcb395f828ad63c9c5e3ca5 .. _PR#68: https://github.com/python-semantic-release/python-semantic-release/pull/68 .. _PR#78: https://github.com/python-semantic-release/python-semantic-release/pull/78 .. _changelog-v3.10.3: v3.10.3 (2018-01-29) ==================== 🪲 Bug Fixes ------------ * Error when not in git repository, closes `#74`_ (`PR#75`_, `251b190`_) .. _#74: https://github.com/python-semantic-release/python-semantic-release/issues/74 .. _251b190: https://github.com/python-semantic-release/python-semantic-release/commit/251b190a2fd5df68892346926d447cbc1b32475a .. _PR#75: https://github.com/python-semantic-release/python-semantic-release/pull/75 .. _changelog-v3.10.2: v3.10.2 (2017-08-03) ==================== 🪲 Bug Fixes ------------ * Update call to upload to work with twine 1.9.1 (`PR#72`_, `8f47643`_) .. _8f47643: https://github.com/python-semantic-release/python-semantic-release/commit/8f47643c54996e06c358537115e7e17b77cb02ca .. _PR#72: https://github.com/python-semantic-release/python-semantic-release/pull/72 .. _changelog-v3.10.1: v3.10.1 (2017-07-22) ==================== 🪲 Bug Fixes ------------ * Update Twine (`PR#69`_, `9f268c3`_) .. _9f268c3: https://github.com/python-semantic-release/python-semantic-release/commit/9f268c373a932621771abbe9607b739b1e331409 .. _PR#69: https://github.com/python-semantic-release/python-semantic-release/pull/69 .. _changelog-v3.10.0: v3.10.0 (2017-05-05) ==================== ✨ Features ----------- * Add git hash to the changelog (`PR#65`_, `628170e`_) 🪲 Bug Fixes ------------ * Make changelog problems not fail whole publish (`b5a68cf`_) 📖 Documentation ---------------- * Fix typo in cli.py docstring (`PR#64`_, `0d13985`_) .. _0d13985: https://github.com/python-semantic-release/python-semantic-release/commit/0d139859cd71f2d483f4360f196d6ef7c8726c18 .. 
_628170e: https://github.com/python-semantic-release/python-semantic-release/commit/628170ebc440fc6abf094dd3e393f40576dedf9b .. _b5a68cf: https://github.com/python-semantic-release/python-semantic-release/commit/b5a68cf6177dc0ed80eda722605db064f3fe2062 .. _PR#64: https://github.com/python-semantic-release/python-semantic-release/pull/64 .. _PR#65: https://github.com/python-semantic-release/python-semantic-release/pull/65 .. _changelog-v3.9.0: v3.9.0 (2016-07-03) =================== ✨ Features ----------- * Add option for choosing between versioning by commit or tag (`c0cd1f5`_) * Don't use file to track version, only tag to commit for versioning (`cd25862`_) * Get repo version from historical tags instead of config file (`a45a9bf`_) 🪲 Bug Fixes ------------ * Can't get the proper last tag from commit history (`5a0e681`_) .. _5a0e681: https://github.com/python-semantic-release/python-semantic-release/commit/5a0e681e256ec511cd6c6a8edfee9d905891da10 .. _a45a9bf: https://github.com/python-semantic-release/python-semantic-release/commit/a45a9bfb64538efeb7f6f42bb6e7ede86a4ddfa8 .. _c0cd1f5: https://github.com/python-semantic-release/python-semantic-release/commit/c0cd1f5b2e0776d7b636c3dd9e5ae863125219e6 .. _cd25862: https://github.com/python-semantic-release/python-semantic-release/commit/cd258623ee518c009ae921cd6bb3119dafae43dc .. _changelog-v3.8.1: v3.8.1 (2016-04-17) =================== 🪲 Bug Fixes ------------ * Add search_parent_directories option to gitpython (`PR#62`_, `8bf9ce1`_) .. _8bf9ce1: https://github.com/python-semantic-release/python-semantic-release/commit/8bf9ce11137399906f18bc8b25698b6e03a65034 .. _PR#62: https://github.com/python-semantic-release/python-semantic-release/pull/62 .. 
_changelog-v3.8.0: v3.8.0 (2016-03-21) =================== ✨ Features ----------- * Add ci checks for circle ci (`151d849`_) 🪲 Bug Fixes ------------ * Add git fetch to frigg after success (`74a6cae`_) * Make tag parser work correctly with breaking changes (`9496f6a`_) * Refactoring cli.py to improve --help and error messages (`c79fc34`_) 📖 Documentation ---------------- * Add info about correct commit guidelines (`af35413`_) * Add info about trello board in readme (`5229557`_) * Fix badges in readme (`7f4e549`_) * Update info about releases in contributing.md (`466f046`_) .. _151d849: https://github.com/python-semantic-release/python-semantic-release/commit/151d84964266c8dca206cef8912391cb73c8f206 .. _466f046: https://github.com/python-semantic-release/python-semantic-release/commit/466f0460774cad86e7e828ffb50c7d1332b64e7b .. _5229557: https://github.com/python-semantic-release/python-semantic-release/commit/5229557099d76b3404ea3677292332442a57ae2e .. _74a6cae: https://github.com/python-semantic-release/python-semantic-release/commit/74a6cae2b46c5150e63136fde0599d98b9486e36 .. _7f4e549: https://github.com/python-semantic-release/python-semantic-release/commit/7f4e5493edb6b3fb3510d0bb78fcc8d23434837f .. _9496f6a: https://github.com/python-semantic-release/python-semantic-release/commit/9496f6a502c79ec3acb4e222e190e76264db02cf .. _af35413: https://github.com/python-semantic-release/python-semantic-release/commit/af35413fae80889e2c5fc6b7d28f77f34b3b4c02 .. _c79fc34: https://github.com/python-semantic-release/python-semantic-release/commit/c79fc3469fb99bf4c7f52434fa9c0891bca757f9 .. _changelog-v3.7.2: v3.7.2 (2016-03-19) =================== 🪲 Bug Fixes ------------ * Move code around a bit to make flake8 happy (`41463b4`_) .. _41463b4: https://github.com/python-semantic-release/python-semantic-release/commit/41463b49b5d44fd94c11ab6e0a81e199510fabec .. 
_changelog-v3.7.1: v3.7.1 (2016-03-15) =================== 📖 Documentation ---------------- * **configuration**: Fix typo in setup.cfg section (`725d87d`_) .. _725d87d: https://github.com/python-semantic-release/python-semantic-release/commit/725d87dc45857ef2f9fb331222845ac83a3af135 .. _changelog-v3.7.0: v3.7.0 (2016-01-10) =================== ✨ Features ----------- * Add ci_checks for Frigg CI (`577c374`_) .. _577c374: https://github.com/python-semantic-release/python-semantic-release/commit/577c374396fe303b6fe7d64630d2959998d3595c .. _changelog-v3.6.1: v3.6.1 (2016-01-10) =================== 🪲 Bug Fixes ------------ * Add requests as dependency (`4525a70`_) .. _4525a70: https://github.com/python-semantic-release/python-semantic-release/commit/4525a70d5520b44720d385b0307e46fae77a7463 .. _changelog-v3.6.0: v3.6.0 (2015-12-28) =================== ✨ Features ----------- * Add checks for semaphore, closes `#44`_ (`2d7ef15`_) 📖 Documentation ---------------- * Add documentation for configuring on CI (`7806940`_) * Add note about node semantic release (`0d2866c`_) * Add step by step guide for configuring travis ci (`6f23414`_) * Move automatic-releases to subfolder (`ed68e5b`_) * Remove duplicate readme (`42a9421`_) .. _#44: https://github.com/python-semantic-release/python-semantic-release/issues/44 .. _0d2866c: https://github.com/python-semantic-release/python-semantic-release/commit/0d2866c528098ecaf1dd81492f28d3022a2a54e0 .. _2d7ef15: https://github.com/python-semantic-release/python-semantic-release/commit/2d7ef157b1250459060e99601ec53a00942b6955 .. _42a9421: https://github.com/python-semantic-release/python-semantic-release/commit/42a942131947cd1864c1ba29b184caf072408742 .. _6f23414: https://github.com/python-semantic-release/python-semantic-release/commit/6f2341442f61f0284b1119a2c49e96f0be678929 .. _7806940: https://github.com/python-semantic-release/python-semantic-release/commit/7806940ae36cb0d6ac0f966e5d6d911bd09a7d11 .. 
_ed68e5b: https://github.com/python-semantic-release/python-semantic-release/commit/ed68e5b8d3489463e244b078ecce8eab2cba2bb1 .. _changelog-v3.5.0: v3.5.0 (2015-12-22) =================== ✨ Features ----------- * Add author in commit, closes `#40`_ (`020efaa`_) * Checkout master before publishing (`dc4077a`_) 🪲 Bug Fixes ------------ * Remove " from git push command (`031318b`_) 📖 Documentation ---------------- * Convert readme to rst (`e8a8d26`_) .. _#40: https://github.com/python-semantic-release/python-semantic-release/issues/40 .. _020efaa: https://github.com/python-semantic-release/python-semantic-release/commit/020efaaadf588e3fccd9d2f08a273c37e4158421 .. _031318b: https://github.com/python-semantic-release/python-semantic-release/commit/031318b3268bc37e6847ec049b37425650cebec8 .. _dc4077a: https://github.com/python-semantic-release/python-semantic-release/commit/dc4077a2d07e0522b625336dcf83ee4e0e1640aa .. _e8a8d26: https://github.com/python-semantic-release/python-semantic-release/commit/e8a8d265aa2147824f18065b39a8e7821acb90ec .. _changelog-v3.4.0: v3.4.0 (2015-12-22) =================== ✨ Features ----------- * Add travis environment checks (`f386db7`_) .. _f386db7: https://github.com/python-semantic-release/python-semantic-release/commit/f386db75b77acd521d2f5bde2e1dde99924dc096 .. _changelog-v3.3.3: v3.3.3 (2015-12-22) =================== 🪲 Bug Fixes ------------ * Do git push and git push --tags instead of --follow-tags (`8bc70a1`_) .. _8bc70a1: https://github.com/python-semantic-release/python-semantic-release/commit/8bc70a183fd72f595c72702382bc0b7c3abe99c8 .. _changelog-v3.3.2: v3.3.2 (2015-12-21) =================== 🪲 Bug Fixes ------------ * Change build badge (`0dc068f`_) 📖 Documentation ---------------- * Update docstrings for generate_changelog (`987c6a9`_) .. _0dc068f: https://github.com/python-semantic-release/python-semantic-release/commit/0dc068fff2f8c6914f4abe6c4e5fb2752669159e .. 
_987c6a9: https://github.com/python-semantic-release/python-semantic-release/commit/987c6a96d15997e38c93a9d841c618c76a385ce7 .. _changelog-v3.3.1: v3.3.1 (2015-12-21) =================== 🪲 Bug Fixes ------------ * Add pandoc to travis settings (`17d40a7`_) * Only list commits from the last version tag, closes `#28`_ (`191369e`_) .. _#28: https://github.com/python-semantic-release/python-semantic-release/issues/28 .. _17d40a7: https://github.com/python-semantic-release/python-semantic-release/commit/17d40a73062ffa774542d0abc0f59fc16b68be37 .. _191369e: https://github.com/python-semantic-release/python-semantic-release/commit/191369ebd68526e5b1afcf563f7d13e18c8ca8bf .. _changelog-v3.3.0: v3.3.0 (2015-12-20) =================== ✨ Features ----------- * Add support for environment variables for pypi credentials (`3b383b9`_) 🪲 Bug Fixes ------------ * Add missing parameters to twine.upload (`4bae22b`_) * Better filtering of github token in push error (`9b31da4`_) * Downgrade twine to version 1.5.0 (`66df378`_) * Make sure the github token is not in the output (`55356b7`_) * Push to master by default (`a0bb023`_) .. _3b383b9: https://github.com/python-semantic-release/python-semantic-release/commit/3b383b92376a7530e89b11de481c4dfdfa273f7b .. _4bae22b: https://github.com/python-semantic-release/python-semantic-release/commit/4bae22bae9b9d9abf669b028ea3af4b3813a1df0 .. _55356b7: https://github.com/python-semantic-release/python-semantic-release/commit/55356b718f74d94dd92e6c2db8a15423a6824eb5 .. _66df378: https://github.com/python-semantic-release/python-semantic-release/commit/66df378330448a313aff7a7c27067adda018904f .. _9b31da4: https://github.com/python-semantic-release/python-semantic-release/commit/9b31da4dc27edfb01f685e6036ddbd4c715c9f60 .. _a0bb023: https://github.com/python-semantic-release/python-semantic-release/commit/a0bb023438a1503f9fdb690d976d71632f19a21f .. 
_changelog-v3.2.1: v3.2.1 (2015-12-20) =================== 🪲 Bug Fixes ------------ * Add requirements to manifest (`ed25ecb`_) * **pypi**: Add sdist as default in addition to bdist_wheel (`a1a35f4`_) .. _a1a35f4: https://github.com/python-semantic-release/python-semantic-release/commit/a1a35f43175187091f028474db2ebef5bfc77bc0 .. _ed25ecb: https://github.com/python-semantic-release/python-semantic-release/commit/ed25ecbaeec0e20ad3040452a5547bb7d6faf6ad .. _changelog-v3.2.0: v3.2.0 (2015-12-20) =================== ✨ Features ----------- * **angular-parser**: Remove scope requirement (`90c9d8d`_) * **git**: Add push to GH_TOKEN@github-url (`546b5bf`_) 🪲 Bug Fixes ------------ * **deps**: Use one file for requirements (`4868543`_) .. _4868543: https://github.com/python-semantic-release/python-semantic-release/commit/486854393b24803bb2356324e045ccab17510d46 .. _546b5bf: https://github.com/python-semantic-release/python-semantic-release/commit/546b5bf15466c6f5dfe93c1c03ca34604b0326f2 .. _90c9d8d: https://github.com/python-semantic-release/python-semantic-release/commit/90c9d8d4cd6d43be094cda86579e00b507571f98 .. _changelog-v3.1.0: v3.1.0 (2015-08-31) =================== ✨ Features ----------- * **pypi**: Add option to disable pypi upload (`f5cd079`_) .. _f5cd079: https://github.com/python-semantic-release/python-semantic-release/commit/f5cd079edb219de5ad03a71448d578f5f477da9c .. _changelog-v3.0.0: v3.0.0 (2015-08-25) =================== ✨ Features ----------- * **parser**: Add tag parser (`a7f392f`_) 🪲 Bug Fixes ------------ * **errors**: Add exposing of errors in package (`3662d76`_) * **version**: Parse file instead for version (`005dba0`_) .. _005dba0: https://github.com/python-semantic-release/python-semantic-release/commit/005dba0094eeb4098315ef383a746e139ffb504d .. _3662d76: https://github.com/python-semantic-release/python-semantic-release/commit/3662d7663291859dd58a91b4b4ccde4f0edc99b2 .. 
_a7f392f: https://github.com/python-semantic-release/python-semantic-release/commit/a7f392fd4524cc9207899075631032e438e2593c .. _changelog-v2.1.4: v2.1.4 (2015-08-24) =================== 🪲 Bug Fixes ------------ * **github**: Fix property calls (`7ecdeb2`_) .. _7ecdeb2: https://github.com/python-semantic-release/python-semantic-release/commit/7ecdeb22de96b6b55c5404ebf54a751911c4d8cd .. _changelog-v2.1.3: v2.1.3 (2015-08-22) =================== 🪲 Bug Fixes ------------ * **hvcs**: Make Github.token an property (`37d5e31`_) 📖 Documentation ---------------- * **api**: Update apidocs (`6185380`_) * **parsers**: Add documentation about commit parsers (`9b55422`_) * **readme**: Update readme with information about the changelog command (`56a745e`_) .. _37d5e31: https://github.com/python-semantic-release/python-semantic-release/commit/37d5e3110397596a036def5f1dccf0860964332c .. _56a745e: https://github.com/python-semantic-release/python-semantic-release/commit/56a745ef6fa4edf6f6ba09c78fcc141102cf2871 .. _6185380: https://github.com/python-semantic-release/python-semantic-release/commit/6185380babedbbeab2a2a342f17b4ff3d4df6768 .. _9b55422: https://github.com/python-semantic-release/python-semantic-release/commit/9b554222768036024a133153a559cdfc017c1d91 .. _changelog-v2.1.2: v2.1.2 (2015-08-20) =================== 🪲 Bug Fixes ------------ * **cli**: Fix call to generate_changelog in publish (`5f8bce4`_) .. _5f8bce4: https://github.com/python-semantic-release/python-semantic-release/commit/5f8bce4cbb5e1729e674efd6c651e2531aea2a16 .. _changelog-v2.1.1: v2.1.1 (2015-08-20) =================== 🪲 Bug Fixes ------------ * **history**: Fix issue in get_previous_version (`f961786`_) .. _f961786: https://github.com/python-semantic-release/python-semantic-release/commit/f961786aa3eaa3a620f47cc09243340fd329b9c2 .. 
_changelog-v2.1.0: v2.1.0 (2015-08-20) =================== ✨ Features ----------- * **cli**: Add the possibility to re-post the changelog (`4d028e2`_) 🪲 Bug Fixes ------------ * **cli**: Fix check of token in changelog command (`cc6e6ab`_) * **github**: Fix the github releases integration (`f0c3c1d`_) * **history**: Fix changelog generation (`f010272`_) .. _4d028e2: https://github.com/python-semantic-release/python-semantic-release/commit/4d028e21b9da01be8caac8f23f2c11e0c087e485 .. _cc6e6ab: https://github.com/python-semantic-release/python-semantic-release/commit/cc6e6abe1e91d3aa24e8d73e704829669bea5fd7 .. _f010272: https://github.com/python-semantic-release/python-semantic-release/commit/f01027203a8ca69d21b4aff689e60e8c8d6f9af5 .. _f0c3c1d: https://github.com/python-semantic-release/python-semantic-release/commit/f0c3c1db97752b71f2153ae9f623501b0b8e2c98 .. _changelog-v2.0.0: v2.0.0 (2015-08-19) =================== ✨ Features ----------- * **cli**: Add command for printing the changelog (`336b8bc`_) * **github**: Add github release changelog helper (`da18795`_) * **history**: Add angular parser (`91e4f0f`_) * **history**: Add generate_changelog function (`347f21a`_) * **history**: Add markdown changelog formatter (`d77b58d`_) * **history**: Set angular parser as the default (`c2cf537`_) * **publish**: Add publishing of changelog to github (`74324ba`_) * **settings**: Add loading of current parser (`7bd0916`_) 🪲 Bug Fixes ------------ * **cli**: Change output indentation on changelog (`2ca41d3`_) * **history**: Fix level id's in angular parser (`2918d75`_) * **history**: Fix regex in angular parser (`974ccda`_) * **history**: Support unexpected types in changelog generator (`13deacf`_) 💥 BREAKING CHANGES ------------------- * **history**: The default parser is now angular. Thus, the default behavior of the commit log evaluator will change. From now on it will use the angular commit message spec to determine the new version. .. 
_13deacf: https://github.com/python-semantic-release/python-semantic-release/commit/13deacf5d33ed500e4e94ea702a2a16be2aa7c48 .. _2918d75: https://github.com/python-semantic-release/python-semantic-release/commit/2918d759bf462082280ede971a5222fe01634ed8 .. _2ca41d3: https://github.com/python-semantic-release/python-semantic-release/commit/2ca41d3bd1b8b9d9fe7e162772560e3defe2a41e .. _336b8bc: https://github.com/python-semantic-release/python-semantic-release/commit/336b8bcc01fc1029ff37a79c92935d4b8ea69203 .. _347f21a: https://github.com/python-semantic-release/python-semantic-release/commit/347f21a1f8d655a71a0e7d58b64d4c6bc6d0bf31 .. _74324ba: https://github.com/python-semantic-release/python-semantic-release/commit/74324ba2749cdbbe80a92b5abbecfeab04617699 .. _7bd0916: https://github.com/python-semantic-release/python-semantic-release/commit/7bd0916f87a1f9fe839c853eab05cae1af420cd2 .. _91e4f0f: https://github.com/python-semantic-release/python-semantic-release/commit/91e4f0f4269d01b255efcd6d7121bbfd5a682e12 .. _974ccda: https://github.com/python-semantic-release/python-semantic-release/commit/974ccdad392d768af5e187dabc184be9ac3e133d .. _c2cf537: https://github.com/python-semantic-release/python-semantic-release/commit/c2cf537a42beaa60cd372c7c9f8fb45db8085917 .. _d77b58d: https://github.com/python-semantic-release/python-semantic-release/commit/d77b58db4b66aec94200dccab94f483def4dacc9 .. _da18795: https://github.com/python-semantic-release/python-semantic-release/commit/da187951af31f377ac57fe17462551cfd776dc6e .. _changelog-v1.0.0: v1.0.0 (2015-08-04) =================== 💥 Breaking ----------- * Restructure helpers into history and pypi (`00f64e6`_) 📖 Documentation ---------------- * Add automatic publishing documentation, resolves `#18`_ (`58076e6`_) .. _#18: https://github.com/python-semantic-release/python-semantic-release/issues/18 .. _00f64e6: https://github.com/python-semantic-release/python-semantic-release/commit/00f64e623db0e21470d55488c5081e12d6c11fd3 .. 
_58076e6: https://github.com/python-semantic-release/python-semantic-release/commit/58076e60bf20a5835b112b5e99a86c7425ffe7d9 .. _changelog-v0.9.1: v0.9.1 (2015-08-04) =================== 🪲 Bug Fixes ------------ * Fix ``get_current_head_hash`` to ensure it only returns the hash (`7c28832`_) .. _7c28832: https://github.com/python-semantic-release/python-semantic-release/commit/7c2883209e5bf4a568de60dbdbfc3741d34f38b4 .. _changelog-v0.9.0: v0.9.0 (2015-08-03) =================== ✨ Features ----------- * Add Python 2.7 support, resolves `#10`_ (`c05e13f`_) .. _#10: https://github.com/python-semantic-release/python-semantic-release/issues/10 .. _c05e13f: https://github.com/python-semantic-release/python-semantic-release/commit/c05e13f22163237e963c493ffeda7e140f0202c6 .. _changelog-v0.8.0: v0.8.0 (2015-08-03) =================== ✨ Features ----------- * Add ``check_build_status`` option, resolves `#5`_ (`310bb93`_) * Add ``get_current_head_hash`` in git helpers (`d864282`_) * Add git helper to get owner and name of repo (`f940b43`_) .. _#5: https://github.com/python-semantic-release/python-semantic-release/issues/5 .. _310bb93: https://github.com/python-semantic-release/python-semantic-release/commit/310bb9371673fcf9b7b7be48422b89ab99753f04 .. _d864282: https://github.com/python-semantic-release/python-semantic-release/commit/d864282c498f0025224407b3eeac69522c2a7ca0 .. _f940b43: https://github.com/python-semantic-release/python-semantic-release/commit/f940b435537a3c93ab06170d4a57287546bd8d3b .. _changelog-v0.7.0: v0.7.0 (2015-08-02) =================== ✨ Features ----------- * Add ``patch_without_tag`` option, resolves `#6`_ (`3734a88`_) 📖 Documentation ---------------- * Set up sphinx based documentation, resolves `#1`_ (`41fba78`_) .. _#1: https://github.com/python-semantic-release/python-semantic-release/issues/1 .. _#6: https://github.com/python-semantic-release/python-semantic-release/issues/6 .. 
_3734a88: https://github.com/python-semantic-release/python-semantic-release/commit/3734a889f753f1b9023876e100031be6475a90d1 .. _41fba78: https://github.com/python-semantic-release/python-semantic-release/commit/41fba78a389a8d841316946757a23a7570763c39 .. _changelog-v0.6.0: v0.6.0 (2015-08-02) =================== ✨ Features ----------- * Add twine for uploads to pypi, resolves `#13`_ (`eec2561`_) .. _#13: https://github.com/python-semantic-release/python-semantic-release/issues/13 .. _eec2561: https://github.com/python-semantic-release/python-semantic-release/commit/eec256115b28b0a18136a26d74cfc3232502f1a6 .. _changelog-v0.5.4: v0.5.4 (2015-07-29) =================== 🪲 Bug Fixes ------------ * Add python2 not supported warning (`e84c4d8`_) .. _e84c4d8: https://github.com/python-semantic-release/python-semantic-release/commit/e84c4d8b6f212aec174baccd188185627b5039b6 .. _changelog-v0.5.3: v0.5.3 (2015-07-28) =================== ⚙️ Build System --------------- * Add ``wheel`` as a dependency (`971e479`_) .. _971e479: https://github.com/python-semantic-release/python-semantic-release/commit/971e4795a8b8fea371fcc02dc9221f58a0559f32 .. _changelog-v0.5.2: v0.5.2 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix python wheel tag (`f9ac163`_) .. _f9ac163: https://github.com/python-semantic-release/python-semantic-release/commit/f9ac163491666022c809ad49846f3c61966e10c1 .. _changelog-v0.5.1: v0.5.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix push commands (`8374ef6`_) .. _8374ef6: https://github.com/python-semantic-release/python-semantic-release/commit/8374ef6bd78eb564a6d846b882c99a67e116394e .. _changelog-v0.5.0: v0.5.0 (2015-07-28) =================== ✨ Features ----------- * Add setup.py hook for the cli interface (`c363bc5`_) .. _c363bc5: https://github.com/python-semantic-release/python-semantic-release/commit/c363bc5d3cb9e9a113de3cd0c49dd54a5ea9cf35 .. 
_changelog-v0.4.0: v0.4.0 (2015-07-28) =================== ✨ Features ----------- * Add publish command (`d8116c9`_) .. _d8116c9: https://github.com/python-semantic-release/python-semantic-release/commit/d8116c9dec472d0007973939363388d598697784 .. _changelog-v0.3.2: v0.3.2 (2015-07-28) =================== * No change .. _changelog-v0.3.1: v0.3.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix wheel settings (`1e860e8`_) .. _1e860e8: https://github.com/python-semantic-release/python-semantic-release/commit/1e860e8a4d9ec580449a0b87be9660a9482fa2a4 .. _changelog-v0.3.0: v0.3.0 (2015-07-27) =================== ✨ Features ----------- * Add support for tagging releases (`5f4736f`_) 🪲 Bug Fixes ------------ * Fix issue when version should not change (`441798a`_) .. _441798a: https://github.com/python-semantic-release/python-semantic-release/commit/441798a223195138c0d3d2c51fc916137fef9a6c .. _5f4736f: https://github.com/python-semantic-release/python-semantic-release/commit/5f4736f4e41bc96d36caa76ca58be0e1e7931069 .. _changelog-v0.2.0: v0.2.0 (2015-07-27) =================== ✨ Features ----------- * added no-operation (``--noop``) mode (`44c2039`_) ⚙️ Build System --------------- * Swapped pygit2 with gitpython to avoid libgit2 dependency (`8165a2e`_) .. _44c2039: https://github.com/python-semantic-release/python-semantic-release/commit/44c203989aabc9366ba42ed2bc40eaccd7ac891c .. _8165a2e: https://github.com/python-semantic-release/python-semantic-release/commit/8165a2eef2c6eea88bfa52e6db37abc7374cccba .. _changelog-v0.1.1: v0.1.1 (2015-07-27) =================== 🪲 Bug Fixes ------------ * Fix entry point (`bd7ce7f`_) .. _bd7ce7f: https://github.com/python-semantic-release/python-semantic-release/commit/bd7ce7f47c49e2027767fb770024a0d4033299fa .. 
_changelog-v0.1.0: v0.1.0 (2015-07-27) =================== * Initial Release python-semantic-release-9.21.0/CONTRIBUTING.rst000066400000000000000000000050451475670435200210250ustar00rootroot00000000000000Contributing ------------ If you want to contribute that is awesome. Remember to be nice to others in issues and reviews. Please remember to write tests for the cool things you create or fix. Unsure about something? No worries, `open an issue`_. .. _open an issue: https://github.com/relekang/python-semantic-release/issues/new Commit messages ~~~~~~~~~~~~~~~ Since python-semantic-release is released with python-semantic-release we need the commit messages to adhere to the `Conventional Commits Specification`_. Although scopes are optional, scopes are expected where applicable. Changes should be committed separately with the commit type they represent, do not combine them all into one commit. If you are unsure how to describe the change correctly just try and ask about it in your pr. If we think it should be something else or there is a pull-request without tags we will help out in adding or changing them. .. _Conventional Commits Specification: https://www.conventionalcommits.org/en/v1.0.0 Releases ~~~~~~~~ This package is released by python-semantic-release on each master build, thus if there are changes that should result in a new release it will happen if the build is green. Development ~~~~~~~~~~~ Install this module and the development dependencies .. code-block:: bash pip install -e .[dev,mypy,test] And if you'd like to build the documentation locally .. code-block:: bash pip install -e .[docs] sphinx-autobuild --open-browser docs docs/_build/html Testing ~~~~~~~ To test your modifications locally: .. 
code-block:: bash # Run type-checking, all tests across all supported Python versions tox # Run all tests for your current installed Python version (with full error output) pytest -vv --comprehensive # Run unit tests for your current installed Python version pytest # or pytest -vv -m unit # Run end-to-end tests for your current installed Python version (with full error output) pytest -vv -m e2e [--comprehensive] The ``--comprehensive`` flag is optional and will run all the variations of tests and it does take significantly longer to run. Building ~~~~~~~~ This project is designed to be versioned and built by itself using the ``tool.semantic_release`` configuration in ``pyproject.toml``. The setting ``tool.semantic_release.build_command`` defines the command to run to build the package. The following is a copy of the ``build_command`` setting which can be run manually to build the package locally: .. code-block:: bash pip install -e .[build] python -m build . python-semantic-release-9.21.0/Dockerfile000066400000000000000000000021061475670435200203510ustar00rootroot00000000000000# This Dockerfile is only for GitHub Actions FROM python:3.13-bookworm # Copy python-semantic-release source code into container COPY . 
/psr RUN \ # Install desired packages apt update && apt install -y --no-install-recommends \ # install git with git-lfs support git git-lfs \ # install python cmodule / binary module build utilities python3-dev gcc make cmake cargo \ # Configure global pip && { \ printf '%s\n' "[global]"; \ printf '%s\n' "no-cache-dir = true"; \ printf '%s\n' "disable-pip-version-check = true"; \ } > /etc/pip.conf \ # Create virtual environment for python-semantic-release && python3 -m venv /psr/.venv \ # Update core utilities in the virtual environment && /psr/.venv/bin/pip install -U pip setuptools wheel \ # Install psr & its dependencies from source into virtual environment && /psr/.venv/bin/pip install /psr \ # Cleanup && apt clean -y ENV PSR_DOCKER_GITHUB_ACTION=true ENV PYTHONDONTWRITEBYTECODE=1 ENTRYPOINT ["/bin/bash", "-l", "/psr/action.sh"] python-semantic-release-9.21.0/LICENSE000066400000000000000000000020731475670435200173670ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2015 Rolf Erik Lekang Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. python-semantic-release-9.21.0/MANIFEST.in000066400000000000000000000002241475670435200201140ustar00rootroot00000000000000# Make sure non-python files are included graft src/**/data/ # include docs & testing into sdist, ignored for wheel build graft tests/ graft docs/ python-semantic-release-9.21.0/README.rst000066400000000000000000000015411475670435200200500ustar00rootroot00000000000000Python Semantic Release *********************** *Automating Releases via SemVer and Commit Message Conventions* ---- The official documentation for Python Semantic Release can be found at `python-semantic-release.readthedocs.io`_. GitHub Action ============= When using the Python Semantic Release GitHub Action, it executes the command ``semantic-release version`` using `python-semantic-release`_. The usage information and examples for this GitHub Action is available under the `GitHub Actions section`_ of `python-semantic-release.readthedocs.io`_. .. _python-semantic-release: https://pypi.org/project/python-semantic-release/ .. _python-semantic-release.readthedocs.io: https://python-semantic-release.readthedocs.io/en/latest/ .. 
_GitHub Actions section: https://python-semantic-release.readthedocs.io/en/latest/automatic-releases/github-actions.html python-semantic-release-9.21.0/action.sh000066400000000000000000000100661475670435200201740ustar00rootroot00000000000000#!/bin/bash set -e # Convert "true"/"false" into command line args, returns "" if not defined eval_boolean_action_input() { local -r input_name="$1" shift local -r flag_value="$1" shift local -r if_true="$1" shift local -r if_false="$1" if [ -z "$flag_value" ]; then echo "" elif [ "$flag_value" = "true" ]; then echo "$if_true" elif [ "$flag_value" = "false" ]; then echo "$if_false" else printf 'Error: Invalid value for input %s: %s is not "true" or "false\n"' \ "$input_name" "$flag_value" >&2 return 1 fi } # Convert inputs to command line arguments export ARGS=() # v10 Breaking change as prerelease should be as_prerelease to match ARGS+=("$(eval_boolean_action_input "prerelease" "$INPUT_PRERELEASE" "--as-prerelease" "")") || exit 1 ARGS+=("$(eval_boolean_action_input "commit" "$INPUT_COMMIT" "--commit" "--no-commit")") || exit 1 ARGS+=("$(eval_boolean_action_input "tag" "$INPUT_TAG" "--tag" "--no-tag")") || exit 1 ARGS+=("$(eval_boolean_action_input "push" "$INPUT_PUSH" "--push" "--no-push")") || exit 1 ARGS+=("$(eval_boolean_action_input "changelog" "$INPUT_CHANGELOG" "--changelog" "--no-changelog")") || exit 1 ARGS+=("$(eval_boolean_action_input "vcs_release" "$INPUT_VCS_RELEASE" "--vcs-release" "--no-vcs-release")") || exit 1 ARGS+=("$(eval_boolean_action_input "build" "$INPUT_BUILD" "" "--skip-build")") || exit 1 # Handle --patch, --minor, --major # https://stackoverflow.com/a/47541882 valid_force_levels=("prerelease" "patch" "minor" "major") if [ -z "$INPUT_FORCE" ]; then true # do nothing if 'force' input is not set elif printf '%s\0' "${valid_force_levels[@]}" | grep -Fxzq "$INPUT_FORCE"; then ARGS+=("--$INPUT_FORCE") else printf "Error: Input 'force' must be one of: %s\n" "${valid_force_levels[@]}" >&2 fi if [ -n 
"$INPUT_BUILD_METADATA" ]; then ARGS+=("--build-metadata $INPUT_BUILD_METADATA") fi if [ -n "$INPUT_PRERELEASE_TOKEN" ]; then ARGS+=("--prerelease-token $INPUT_PRERELEASE_TOKEN") fi # Change to configured directory cd "${INPUT_DIRECTORY}" # Set Git details if ! [ "${INPUT_GIT_COMMITTER_NAME:="-"}" = "-" ]; then git config --global user.name "$INPUT_GIT_COMMITTER_NAME" fi if ! [ "${INPUT_GIT_COMMITTER_EMAIL:="-"}" = "-" ]; then git config --global user.email "$INPUT_GIT_COMMITTER_EMAIL" fi if [ "${INPUT_GIT_COMMITTER_NAME:="-"}" != "-" ] && [ "${INPUT_GIT_COMMITTER_EMAIL:="-"}" != "-" ]; then # Must export this value to the environment for PSR to consume the override export GIT_COMMIT_AUTHOR="$INPUT_GIT_COMMITTER_NAME <$INPUT_GIT_COMMITTER_EMAIL>" fi # See https://github.com/actions/runner-images/issues/6775#issuecomment-1409268124 # and https://github.com/actions/runner-images/issues/6775#issuecomment-1410270956 git config --system --add safe.directory "*" if [[ -n "$INPUT_SSH_PUBLIC_SIGNING_KEY" && -n "$INPUT_SSH_PRIVATE_SIGNING_KEY" ]]; then echo "SSH Key pair found, configuring signing..." # Write keys to disk mkdir -vp ~/.ssh echo -e "$INPUT_SSH_PUBLIC_SIGNING_KEY" >>~/.ssh/signing_key.pub cat ~/.ssh/signing_key.pub echo -e "$INPUT_SSH_PRIVATE_SIGNING_KEY" >>~/.ssh/signing_key # DO NOT CAT private key for security reasons sha256sum ~/.ssh/signing_key # Ensure read only private key chmod 400 ~/.ssh/signing_key # Enable ssh-agent & add signing key eval "$(ssh-agent -s)" ssh-add ~/.ssh/signing_key # Create allowed_signers file for git if [ "${INPUT_GIT_COMMITTER_EMAIL:="-"}" = "-" ]; then echo >&2 "git_committer_email must be set to use SSH key signing!" 
exit 1 fi touch ~/.ssh/allowed_signers echo "$INPUT_GIT_COMMITTER_EMAIL $INPUT_SSH_PUBLIC_SIGNING_KEY" >~/.ssh/allowed_signers # Configure git for signing git config --global gpg.format ssh git config --global gpg.ssh.allowedSignersFile ~/.ssh/allowed_signers git config --global user.signingKey ~/.ssh/signing_key git config --global commit.gpgsign true git config --global tag.gpgsign true fi # Copy inputs into correctly-named environment variables export GH_TOKEN="${INPUT_GITHUB_TOKEN}" # Run Semantic Release (explicitly use the GitHub action version) eval "/psr/.venv/bin/semantic-release $INPUT_ROOT_OPTIONS version ${ARGS[*]}" python-semantic-release-9.21.0/action.yml000066400000000000000000000062671475670435200203730ustar00rootroot00000000000000--- name: Python Semantic Release description: Automated Releases via SemVer and Commit Message Conventions branding: color: orange inputs: root_options: default: "-v" required: false description: | Additional options for the main command. Example: -vv --noop directory: default: "." required: false description: Sub-directory to cd into before running semantic-release github_token: type: string required: true description: GitHub token used to push release notes and new commits/tags git_committer_name: type: string required: false description: The human name for the “committer” field git_committer_email: type: string required: false description: The email address for the “committer” field ssh_public_signing_key: type: string required: false description: The ssh public key used to sign commits ssh_private_signing_key: type: string required: false description: The ssh private key used to sign commits # `semantic-release version` command line options prerelease: type: string required: false description: | Force the next version to be a prerelease. Set to "true" or "false". 
prerelease_token: type: string required: false description: "Force the next version to use this prerelease token, if it is a prerelease" force: type: string required: false description: | Force the next version to be a major release. Must be set to one of "prerelease", "patch", "minor", or "major". commit: type: string required: false description: Whether or not to commit changes locally. Defaults are handled by python-semantic-release internal version command. tag: type: string required: false description: | Whether or not to make a local version tag. Defaults are handled by python-semantic-release internal version command. push: type: string required: false description: | Whether or not to push local commits to the Git repository. See the configuration page for defaults of `semantic-release version` for how the default is determined between push, tag, & commit. changelog: type: string required: false description: | Whether or not to update the changelog. vcs_release: type: string required: false description: | Whether or not to create a release in the remote VCS, if supported build: type: string required: false description: | Whether or not to run the build_command for the project. Defaults are handled by python-semantic-release internal version command. 
build_metadata: type: string required: false description: | Build metadata to append to the new version outputs: is_prerelease: description: | "true" if the version is a prerelease, "false" otherwise released: description: | "true" if a release was made, "false" otherwise tag: description: | The Git tag corresponding to the version output version: description: | The newly released version if one was made, otherwise the current version runs: using: docker image: Dockerfile python-semantic-release-9.21.0/config/000077500000000000000000000000001475670435200176255ustar00rootroot00000000000000python-semantic-release-9.21.0/config/release-templates/000077500000000000000000000000001475670435200232415ustar00rootroot00000000000000python-semantic-release-9.21.0/config/release-templates/.components/000077500000000000000000000000001475670435200255045ustar00rootroot00000000000000python-semantic-release-9.21.0/config/release-templates/.components/changelog_1.0.0.rst.j2000066400000000000000000000143771475670435200312270ustar00rootroot00000000000000{# This file overrides what would be generated normally because the commits are not conformative to the standard commit message format. #} .. _changelog-v1.0.0: v1.0.0 (2015-08-04) =================== 💥 Breaking ----------- * Restructure helpers into history and pypi (`00f64e6`_) 📖 Documentation ---------------- * Add automatic publishing documentation, resolves `#18`_ (`58076e6`_) .. _#18: https://github.com/python-semantic-release/python-semantic-release/issues/18 .. _00f64e6: https://github.com/python-semantic-release/python-semantic-release/commit/00f64e623db0e21470d55488c5081e12d6c11fd3 .. _58076e6: https://github.com/python-semantic-release/python-semantic-release/commit/58076e60bf20a5835b112b5e99a86c7425ffe7d9 .. _changelog-v0.9.1: v0.9.1 (2015-08-04) =================== 🪲 Bug Fixes ------------ * Fix ``get_current_head_hash`` to ensure it only returns the hash (`7c28832`_) .. 
_7c28832: https://github.com/python-semantic-release/python-semantic-release/commit/7c2883209e5bf4a568de60dbdbfc3741d34f38b4 .. _changelog-v0.9.0: v0.9.0 (2015-08-03) =================== ✨ Features ----------- * Add Python 2.7 support, resolves `#10`_ (`c05e13f`_) .. _#10: https://github.com/python-semantic-release/python-semantic-release/issues/10 .. _c05e13f: https://github.com/python-semantic-release/python-semantic-release/commit/c05e13f22163237e963c493ffeda7e140f0202c6 .. _changelog-v0.8.0: v0.8.0 (2015-08-03) =================== ✨ Features ----------- * Add ``check_build_status`` option, resolves `#5`_ (`310bb93`_) * Add ``get_current_head_hash`` in git helpers (`d864282`_) * Add git helper to get owner and name of repo (`f940b43`_) .. _#5: https://github.com/python-semantic-release/python-semantic-release/issues/5 .. _310bb93: https://github.com/python-semantic-release/python-semantic-release/commit/310bb9371673fcf9b7b7be48422b89ab99753f04 .. _d864282: https://github.com/python-semantic-release/python-semantic-release/commit/d864282c498f0025224407b3eeac69522c2a7ca0 .. _f940b43: https://github.com/python-semantic-release/python-semantic-release/commit/f940b435537a3c93ab06170d4a57287546bd8d3b .. _changelog-v0.7.0: v0.7.0 (2015-08-02) =================== ✨ Features ----------- * Add ``patch_without_tag`` option, resolves `#6`_ (`3734a88`_) 📖 Documentation ---------------- * Set up sphinx based documentation, resolves `#1`_ (`41fba78`_) .. _#1: https://github.com/python-semantic-release/python-semantic-release/issues/1 .. _#6: https://github.com/python-semantic-release/python-semantic-release/issues/6 .. _3734a88: https://github.com/python-semantic-release/python-semantic-release/commit/3734a889f753f1b9023876e100031be6475a90d1 .. _41fba78: https://github.com/python-semantic-release/python-semantic-release/commit/41fba78a389a8d841316946757a23a7570763c39 .. 
_changelog-v0.6.0: v0.6.0 (2015-08-02) =================== ✨ Features ----------- * Add twine for uploads to pypi, resolves `#13`_ (`eec2561`_) .. _#13: https://github.com/python-semantic-release/python-semantic-release/issues/13 .. _eec2561: https://github.com/python-semantic-release/python-semantic-release/commit/eec256115b28b0a18136a26d74cfc3232502f1a6 .. _changelog-v0.5.4: v0.5.4 (2015-07-29) =================== 🪲 Bug Fixes ------------ * Add python2 not supported warning (`e84c4d8`_) .. _e84c4d8: https://github.com/python-semantic-release/python-semantic-release/commit/e84c4d8b6f212aec174baccd188185627b5039b6 .. _changelog-v0.5.3: v0.5.3 (2015-07-28) =================== ⚙️ Build System --------------- * Add ``wheel`` as a dependency (`971e479`_) .. _971e479: https://github.com/python-semantic-release/python-semantic-release/commit/971e4795a8b8fea371fcc02dc9221f58a0559f32 .. _changelog-v0.5.2: v0.5.2 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix python wheel tag (`f9ac163`_) .. _f9ac163: https://github.com/python-semantic-release/python-semantic-release/commit/f9ac163491666022c809ad49846f3c61966e10c1 .. _changelog-v0.5.1: v0.5.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix push commands (`8374ef6`_) .. _8374ef6: https://github.com/python-semantic-release/python-semantic-release/commit/8374ef6bd78eb564a6d846b882c99a67e116394e .. _changelog-v0.5.0: v0.5.0 (2015-07-28) =================== ✨ Features ----------- * Add setup.py hook for the cli interface (`c363bc5`_) .. _c363bc5: https://github.com/python-semantic-release/python-semantic-release/commit/c363bc5d3cb9e9a113de3cd0c49dd54a5ea9cf35 .. _changelog-v0.4.0: v0.4.0 (2015-07-28) =================== ✨ Features ----------- * Add publish command (`d8116c9`_) .. _d8116c9: https://github.com/python-semantic-release/python-semantic-release/commit/d8116c9dec472d0007973939363388d598697784 .. _changelog-v0.3.2: v0.3.2 (2015-07-28) =================== * No change .. 
_changelog-v0.3.1: v0.3.1 (2015-07-28) =================== 🪲 Bug Fixes ------------ * Fix wheel settings (`1e860e8`_) .. _1e860e8: https://github.com/python-semantic-release/python-semantic-release/commit/1e860e8a4d9ec580449a0b87be9660a9482fa2a4 .. _changelog-v0.3.0: v0.3.0 (2015-07-27) =================== ✨ Features ----------- * Add support for tagging releases (`5f4736f`_) 🪲 Bug Fixes ------------ * Fix issue when version should not change (`441798a`_) .. _441798a: https://github.com/python-semantic-release/python-semantic-release/commit/441798a223195138c0d3d2c51fc916137fef9a6c .. _5f4736f: https://github.com/python-semantic-release/python-semantic-release/commit/5f4736f4e41bc96d36caa76ca58be0e1e7931069 .. _changelog-v0.2.0: v0.2.0 (2015-07-27) =================== ✨ Features ----------- * added no-operation (``--noop``) mode (`44c2039`_) ⚙️ Build System --------------- * Swapped pygit2 with gitpython to avoid libgit2 dependency (`8165a2e`_) .. _44c2039: https://github.com/python-semantic-release/python-semantic-release/commit/44c203989aabc9366ba42ed2bc40eaccd7ac891c .. _8165a2e: https://github.com/python-semantic-release/python-semantic-release/commit/8165a2eef2c6eea88bfa52e6db37abc7374cccba .. _changelog-v0.1.1: v0.1.1 (2015-07-27) =================== 🪲 Bug Fixes ------------ * Fix entry point (`bd7ce7f`_) .. _bd7ce7f: https://github.com/python-semantic-release/python-semantic-release/commit/bd7ce7f47c49e2027767fb770024a0d4033299fa python-semantic-release-9.21.0/config/release-templates/.components/changelog_header.rst.j2000066400000000000000000000004071475670435200320100ustar00rootroot00000000000000.. 
_changelog: {% if ctx.changelog_mode == "update" %}{# # Modified insertion flag to insert a changelog header directly # which convienently puts the insertion flag incognito when reading raw RST #}{{ insertion_flag ~ "\n" }}{% endif %} python-semantic-release-9.21.0/config/release-templates/.components/changelog_init.rst.j2000066400000000000000000000022721475670435200315250ustar00rootroot00000000000000{# This changelog template initializes a full changelog for the project, it follows the following logic: 1. Header 2. Any Unreleased Details (uncommon) 3. all previous releases except the very first release 4. the first release #}{# # # Header #}{% include "changelog_header.rst.j2" -%}{# # # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" -%}{# # # Since this is initialization, we are generating all the previous # # release notes per version. The very first release notes is specialized. # # We also have non-conformative commits, so insert manual write-ups. #}{% if releases | length > 0 %}{% for release in releases %}{% if loop.last %}{{ "\n" }}{% include "first_release.rst.j2" -%}{{ "\n" }}{# #}{% elif release.version == "1.0.0" %}{# # Append 0.1.1 through 1.0.0 non-generated changelog only once #}{{ "\n" }}{% include "changelog_1.0.0.rst.j2" -%}{{ "\n\n" }}{# #}{% elif release.version > "1.0.0" %}{{ "\n" }}{% include "versioned_changes.rst.j2" -%}{{ "\n" }}{% endif %}{% endfor %}{% endif %} python-semantic-release-9.21.0/config/release-templates/.components/changelog_update.rst.j2000066400000000000000000000050541475670435200320450ustar00rootroot00000000000000{# This Update changelog template uses the following logic: 1. Read previous changelog file (ex. project_root/CHANGELOG.md) 2. Split on insertion flag (ex. ) 3. Print top half of previous changelog 3. New Changes (unreleased commits & newly released) 4. 
Print bottom half of previous changelog Note: if a previous file was not found, it does not write anything at the bottom but render does NOT fail #}{% set prev_changelog_contents = prev_changelog_file | read_file | safe %}{% set changelog_parts = prev_changelog_contents.split(insertion_flag, maxsplit=1) %}{# #}{% if changelog_parts | length < 2 %}{# # insertion flag was not found, check if the file was empty or did not exist #}{% if prev_changelog_contents | length > 0 %}{# # File has content but no insertion flag, therefore, file will not be updated #}{{ changelog_parts[0] }}{% else %}{# # File was empty or did not exist, therefore, it will be created from scratch #}{% include "changelog_init.rst.j2" %}{% endif %}{% else %}{# # Previous Changelog Header # - Depending if there is header content, then it will separate the insertion flag # with a newline from header content, otherwise it will just print the insertion flag #}{% set prev_changelog_top = changelog_parts[0] | trim %}{% if prev_changelog_top | length > 0 %}{{ "%s\n\n%s\n" | format(prev_changelog_top, insertion_flag | trim) }}{% else %}{{ "%s\n" | format(insertion_flag | trim) }}{% endif %}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" -%}{# #}{% if releases | length > 0 %}{# # Latest Release Details #}{% set release = releases[0] %}{# #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # First Release detected #}{{ "\n" }}{%- include "first_release.rst.j2" -%}{{ "\n" }}{# #}{% elif release.version.as_semver_tag() ~ " (" not in changelog_parts[1] %}{# # The release version is not already in the changelog so we add it #}{{ "\n" }}{%- include "versioned_changes.rst.j2" -%}{{ "\n" }}{# #}{% endif %}{% endif %}{# # Previous Changelog Footer # - skips printing footer if empty, which happens when the insertion_flag # was at the end of the file (ignoring whitespace) #}{% set previous_changelog_bottom = changelog_parts[1] | trim %}{% if previous_changelog_bottom | 
length > 0 %}{{ "\n%s\n" | format(previous_changelog_bottom) }}{% endif %}{% endif %} python-semantic-release-9.21.0/config/release-templates/.components/changes.md.j2000066400000000000000000000116021475670435200277500ustar00rootroot00000000000000{% from 'macros.md.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.md.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.md.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.md.j2' import emoji_map, format_breaking_changes_description %}{% from 'macros.md.j2' import format_commit_summary_line, format_release_notice %}{% from 'macros.md.j2' import section_heading_order, section_heading_translations %}{# EXAMPLE: ### ✨ Features - Add new feature ([#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### 🪲 Bug Fixes - Fix bug ([#11](https://domain.com/namespace/repo/pull/11), [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### 💥 Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### 💡 Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
#}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% for type_ in section_heading_order if type_ in commit_objects %}{# PREPROCESS COMMITS (order by description & format description line) #}{% set ns = namespace(commits=commit_objects[type_]) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Generate the commit summary line and format it for Markdown #}{% set description = "- %s" | format(format_commit_summary_line(commit)) %}{% set description = description | autofit_text_width(max_line_width, hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (header & commits) #}{{ "\n" }}{{ "### %s %s\n" | format(emoji_map[type_], type_ | title) }}{{ "\n" }}{{ "%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endfor %}{# # # Determine if any commits have a breaking change or release notice # # commit_objects is a dictionary of strings to a list of commits { "features", [ParsedCommit(), ...] 
} #}{% set breaking_commits = [] %}{% set notice_commits = [] %}{% for commits in commit_objects.values() %}{% set valid_commits = commits | rejectattr("error", "defined") | list %}{# # Filter out breaking change commits that have no breaking descriptions #}{% set _ = breaking_commits.extend( valid_commits | selectattr("breaking_descriptions.0") ) %}{# # Filter out ParsedCommits commits that have no release notices #}{% set _ = notice_commits.extend( valid_commits | selectattr("release_notices.0") ) %}{% endfor %}{# #}{% if breaking_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "- %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = brking_descriptions.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "### %s Breaking Changes\n" | format(emoji_map["breaking"]) }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "- %s" | format( format_release_notice(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = release_notices.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "### %s Additional Release Information\n" | format(emoji_map["release_note"]) }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %} 
python-semantic-release-9.21.0/config/release-templates/.components/changes.rst.j2000066400000000000000000000153131475670435200301630ustar00rootroot00000000000000{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.rst.j2' import emoji_map, extract_issue_link_references, extract_pr_link_reference %}{% from 'macros.rst.j2' import format_breaking_changes_description, format_commit_summary_line %}{% from 'macros.rst.j2' import format_link_reference, format_release_notice %}{% from 'macros.rst.j2' import generate_heading_underline, section_heading_order %}{% from 'macros.rst.j2' import section_heading_translations %}{# ✨ Features ----------- * Add new feature (`#10`_, `8a7b8ec`_) * **scope**: Add another feature (`abcdef0`_) 🪲 Bug Fixes ------------ * Fix bug (`#11`_, `8a7b8ec`_) 💥 Breaking Changes ------------------- * With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. * **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. 💡 Additional Release Information --------------------------------- * This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. * **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. .. _8a7B8ec: https://domain.com/owner/repo/commit/8a7b8ec .. _abcdef0: https://domain.com/owner/repo/commit/abcdef0 .. _PR#10: https://domain.com/namespace/repo/pull/10 .. 
_PR#11: https://domain.com/namespace/repo/pull/11 #}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% set post_paragraph_links = [] %}{# #}{% for type_ in section_heading_order if type_ in commit_objects %}{# # PREPARE SECTION HEADER #}{% set section_header = "%s %s" | format( emoji_map[type_], type_ | title ) %}{# # # PREPROCESS COMMITS #}{% set ns = namespace(commits=commit_objects[type_]) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Extract PR/MR reference if it exists and store it for later #}{% set pr_link_reference = extract_pr_link_reference(commit) | default("", true) %}{% if pr_link_reference != "" %}{% set _ = post_paragraph_links.append(pr_link_reference) %}{% endif %}{# # # Extract Issue references if they exists and store it for later #}{% set issue_urls_ns = namespace(urls=[]) %}{% set _ = extract_issue_link_references(issue_urls_ns, commit) %}{% set _ = post_paragraph_links.extend(issue_urls_ns.urls) %}{# # # Always generate a commit hash reference link and store it for later #}{% set commit_hash_link_reference = format_link_reference( commit.hexsha | commit_hash_url, commit.short_hash ) %}{% set _ = post_paragraph_links.append(commit_hash_link_reference) %}{# # # Generate the commit summary line and format it for RST #}{% set description = "* %s" | format(format_commit_summary_line(commit)) %}{% set description = description | convert_md_to_rst %}{% set description = description | autofit_text_width(max_line_width, hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (Header & Commits) # Note: Must add an additional character to the section header when determining the underline because of # the emoji character which can serve as 2 characters in length. 
#}{{ "\n" }}{{ section_header ~ "\n" }}{{ generate_heading_underline(section_header ~ " ", '-') ~ "\n" }}{{ "\n%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endfor %}{# # # Determine if any commits have a breaking change or release notice # # commit_objects is a dictionary of strings to a list of commits { "features", [ParsedCommit(), ...] } #}{% set breaking_commits = [] %}{% set notice_commits = [] %}{% for commits in commit_objects.values() %}{% set valid_commits = commits | rejectattr("error", "defined") | list %}{# # Filter out breaking change commits that have no breaking descriptions #}{% set _ = breaking_commits.extend( valid_commits | selectattr("breaking_descriptions.0") ) %}{# # Filter out ParsedCommits commits that have no release notices #}{% set _ = notice_commits.extend( valid_commits | selectattr("release_notices.0") ) %}{% endfor %}{# #}{% if breaking_commits | length > 0 %}{# # PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "* %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = brking_descriptions.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "%s Breaking Changes\n" | format(emoji_map["breaking"]) }}{{ '-------------------\n' }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "* %s" | format( 
format_release_notice(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = release_notices.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "%s Additional Release Information\n" | format(emoji_map["release_note"]) }}{{ "---------------------------------\n" }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %}{# # # # PRINT POST PARAGRAPH LINKS #}{% if post_paragraph_links | length > 0 %}{# # Print out any PR/MR or Issue URL references that were found in the commit messages #}{{ "\n%s\n" | format(post_paragraph_links | unique | sort | join("\n")) }}{% endif %} python-semantic-release-9.21.0/config/release-templates/.components/first_release.rst.j2000066400000000000000000000007161475670435200314030ustar00rootroot00000000000000{% from "macros.rst.j2" import generate_heading_underline %}{# .. _changelog-vX.X.X: vX.X.X (YYYY-MMM-DD) ==================== * Initial Release #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %} {{- ".. 
_changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} * Initial Release python-semantic-release-9.21.0/config/release-templates/.components/macros.md.j2000066400000000000000000000172261475670435200276340ustar00rootroot00000000000000{% set section_heading_translations = { 'feat': 'features', 'fix': 'bug fixes', 'perf': 'performance improvements', 'docs': 'documentation', 'build': 'build system', 'refactor': 'refactoring', 'test': 'testing', 'ci': 'continuous integration', 'chore': 'chores', 'style': 'code style', } %} {% set section_heading_order = section_heading_translations.values() %} {% set emoji_map = { 'breaking': '💥', 'features': '✨', 'bug fixes': '🪲', 'performance improvements': '⚡', 'documentation': '📖', 'build system': '⚙️', 'refactoring': '♻️', 'testing': '✅', 'continuous integration': '🤖', 'chores': '🧹', 'code style': '🎨', 'unknown': '❗', 'release_note': '💡', } %} {# MACRO: format a inline link reference in Markdown #}{% macro format_link(link, label) %}{{ "[%s](%s)" | format(label, link) }}{% endmacro %} {# MACRO: Capitalize the first letter of a string only #}{% macro capitalize_first_letter_only(sentence) %}{{ (sentence[0] | upper) ~ sentence[1:] }}{% endmacro %} {# MACRO: commit message links or PR/MR links of commit #}{% macro commit_msg_links(commit) %}{% if commit.error is undefined %}{# # # Initialize variables #}{% set link_references = [] %}{% set summary_line = capitalize_first_letter_only( commit.descriptions[0] | safe ) %}{# #}{% if commit.linked_merge_request != "" %}{% set pr_num = commit.linked_merge_request %}{# # TODO: breaking change v10, remove summary line replacers as PSR will do it for us #}{% set summary_line = summary_line | replace("(pull request ", "(") | replace("(" ~ pr_num ~ ")", "") | trim %}{# # # Add PR references with a link to the PR #}{% set _ = link_references.append( format_link(pr_num | pull_request_url, "PR" ~ pr_num) ) %}{% endif 
%}{# # # DEFAULT: Always include the commit hash as a link #}{% set _ = link_references.append( format_link( commit.hexsha | commit_hash_url, "`%s`" | format(commit.short_hash) ) ) %}{# #}{% set formatted_links = "" %}{% if link_references | length > 0 %}{% set formatted_links = " (%s)" | format(link_references | join(", ")) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line ~ formatted_links }}{% endif %}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error #}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} {# MACRO: format the breaking changes description by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_breaking_changes_description(commit) %}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit.breaking_descriptions %}{% if paragraph | trim | length > 0 %}{# #}{% set paragraph_text = capitalize_first_letter_only(paragraph) | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: format the release notice by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_release_notice(commit) %}{% 
set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit.release_notices %}{% if paragraph | trim | length > 0 %}{# #}{% set paragraph_text = capitalize_first_letter_only(paragraph) | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: order commits alphabetically by scope and attribute - Commits are sorted based on scope and then the attribute alphabetically - Commits without scope are placed first and sorted alphabetically by the attribute - parameter: ns (namespace) object with a commits list - parameter: attr (string) attribute to sort by - returns None but modifies the ns.commits list in place #}{% macro order_commits_alphabetically_by_scope_and_attr(ns, attr) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by attr #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute=attr) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then attr #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute=(['scope', attr] | join(","))) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized summaries and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - 
parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized breaking changes and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_brk_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'breaking_descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized release notices and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_release_notices(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'release_notices.0') %}{% endmacro %} python-semantic-release-9.21.0/config/release-templates/.components/macros.rst.j2000066400000000000000000000226521475670435200300430ustar00rootroot00000000000000{# TODO: move to configuration for user to modify #} {% set section_heading_translations = { 'feat': 'features', 'fix': 'bug fixes', 'perf': 'performance improvements', 'docs': 'documentation', 'build': 'build system', 'refactor': 'refactoring', 'test': 'testing', 'ci': 'continuous integration', 'chore': 'chores', 'style': 'code style', } %} {% set section_heading_order = section_heading_translations.values() %} {% set emoji_map = { 'breaking': '💥', 'features': '✨', 'bug fixes': '🪲', 'performance improvements': '⚡', 'documentation': '📖', 'build system': '⚙️', 'refactoring': 
'♻️', 'testing': '✅', 'continuous integration': '🤖', 'chores': '🧹', 'code style': '🎨', 'unknown': '❗', 'release_note': '💡', } %} {# MACRO: format a post-paragraph link reference in RST #}{% macro format_link_reference(link, label) %}{{ ".. _%s: %s" | format(label, link) }}{% endmacro %} {# MACRO: Capitalize the first letter of a string only #}{% macro capitalize_first_letter_only(sentence) %}{{ (sentence[0] | upper) ~ sentence[1:] }}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error #}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} {# MACRO: Create & return an non-inline RST link from a commit message - Returns empty string if no PR/MR identifier is found #}{% macro extract_pr_link_reference(commit) %}{% if commit.error is undefined %}{% set summary_line = commit.descriptions[0] %}{# #}{% if commit.linked_merge_request != "" %}{# # Create a PR/MR reference url #}{{ format_link_reference( commit.linked_merge_request | pull_request_url, "PR" ~ commit.linked_merge_request, ) }}{% endif %}{% endif %}{% endmacro %} {# MACRO: Extract issue references from a parsed commit object - Stores the issue urls in the namespace object #}{% macro extract_issue_link_references(ns, commit) %}{% set issue_urls = [] %}{# #}{% if commit.linked_issues is defined and commit.linked_issues | length > 0 %}{% for issue_num in commit.linked_issues %}{# # Create an issue reference url #}{% set _ = issue_urls.append( format_link_reference( issue_num | issue_url, issue_num, ) ) %}{% 
endfor %}{% endif %}{# # # Store the issue urls in the namespace object #}{% set ns.urls = issue_urls %}{% endmacro %} {# MACRO: formats a commit message for a non-inline RST link for a commit hash and/or PR/MR #}{% macro commit_msg_links(commit) %}{% if commit.error is undefined %}{# # # Initialize variables #}{% set closes_statement = "" %}{% set link_references = [] %}{% set summary_line = capitalize_first_letter_only( commit.descriptions[0] | safe ) %}{# #}{% if commit.linked_issues | length > 0 %}{% set closes_statement = ", closes `%s`_" | format( commit.linked_issues | join("`_, `") ) %}{% endif %}{# #}{% if commit.linked_merge_request != "" %}{# # TODO: breaking change v10, remove summary line replacers as PSR will do it for us #}{% set summary_line = summary_line | replace("(pull request ", "(") | replace("(" ~ commit.linked_merge_request ~ ")", "") | trim %}{# # # Add PR references with a link to the PR #}{% set _ = link_references.append("`PR%s`_" | format(commit.linked_merge_request)) %}{% endif %}{# # DEFAULT: Always include the commit hash as a link #}{% set _ = link_references.append("`%s`_" | format(commit.short_hash)) %}{# #}{% set formatted_links = "" %}{% if link_references | length > 0 %}{% set formatted_links = " (%s)" | format(link_references | join(", ")) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line ~ closes_statement ~ formatted_links }}{% endif %}{% endmacro %} {# MACRO: generate a heading underline that matches the exact length of the header #} {% macro generate_heading_underline(header, underline_char) %}{% set header_underline = [] %}{% for _ in header %}{% set __ = header_underline.append(underline_char) %}{% endfor %}{# # Print out the header underline #}{{ header_underline | join }}{% endmacro %} {# MACRO: format the breaking changes description by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_breaking_changes_description(commit) %}{% set ns = namespace(full_description="") 
%}{# #}{% if commit.error is undefined %}{% for paragraph in commit.breaking_descriptions %}{% if paragraph | trim | length > 0 %}{# #}{% set paragraph_text = capitalize_first_letter_only(paragraph) | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: format the release notice by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_release_notice(commit) %}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit.release_notices %}{% if paragraph | trim | length > 0 %}{# #}{% set paragraph_text = capitalize_first_letter_only(paragraph) | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: order commits alphabetically by scope and attribute - Commits are sorted based on scope and then the attribute alphabetically - Commits without scope are placed first and sorted alphabetically by the attribute - parameter: ns (namespace) object with a commits list - parameter: attr (string) attribute to sort by - returns None but modifies the ns.commits list in place #}{% macro order_commits_alphabetically_by_scope_and_attr(ns, attr) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope 
and sort alphabetically by attr #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute=attr) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then attr #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute=(['scope', attr] | join(","))) %}{% set _ = ordered_commits.append(commit) %}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized summaries and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized breaking changes and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_brk_descriptions(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'breaking_descriptions.0') %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized release notices and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_release_notices(ns) %}{% set _ = order_commits_alphabetically_by_scope_and_attr(ns, 'release_notices.0') %}{% endmacro %} 
python-semantic-release-9.21.0/config/release-templates/.components/unreleased_changes.rst.j2000066400000000000000000000003121475670435200323630ustar00rootroot00000000000000{% if unreleased_commits | length > 0 %} .. _changelog-unreleased: Unreleased ========== {% set commit_objects = unreleased_commits %}{% include "changes.rst.j2" -%}{{ "\n" }}{% endif %} python-semantic-release-9.21.0/config/release-templates/.components/versioned_changes.md.j2000066400000000000000000000007531475670435200320330ustar00rootroot00000000000000{# EXAMPLE: ## vX.X.X (YYYY-MMM-DD) _This release is published under the MIT License._ # Release Notes Only {{ change_sections }} #}{{ "## %s (%s)\n" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) }}{% if license_name is defined and license_name %}{{ "\n_This release is published under the %s License._\n" | format(license_name) }}{% endif %}{# #}{% set commit_objects = release["elements"] %}{% include "changes.md.j2" -%} python-semantic-release-9.21.0/config/release-templates/.components/versioned_changes.rst.j2000066400000000000000000000010311475670435200322310ustar00rootroot00000000000000{% from 'macros.rst.j2' import generate_heading_underline %}{# .. _changelog-X.X.X: vX.X.X (YYYY-MMM-DD) ==================== {{ change_sections }} #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %}{# #}{{ ".. 
_changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} {# #}{% set commit_objects = release["elements"] %}{% include "changes.rst.j2" -%} python-semantic-release-9.21.0/config/release-templates/.release_notes.md.j2000066400000000000000000000100531475670435200270020ustar00rootroot00000000000000{% from ".components/macros.md.j2" import format_link %}{# EXAMPLE: ## v1.0.0 (2020-01-01) _This release is published under the MIT License._ ### ✨ Features - Add new feature ([PR#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### 🪲 Bug Fixes - Fix bug ([PR#11](https://domain.com/namespace/repo/pull/11), [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### 💥 Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### 💡 Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
### ✅ Resolved Issues - [#000](https://domain.com/namespace/repo/issues/000): _Title_ --- **Detailed Changes**: [vX.X.X...vX.X.X](https://domain.com/namespace/repo/compare/vX.X.X...vX.X.X) --- **Installable artifacts are available from**: - [PyPi Registry](https://pypi.org/project/package_name/x.x.x) - [GitHub Release Assets](https://github.com/namespace/repo/releases/tag/vX.X.X) #}{# # Set line width to 1000 to avoid wrapping as GitHub will handle it #}{% set max_line_width = max_line_width | default(1000) %}{% set hanging_indent = hanging_indent | default(2) %}{% set license_name = license_name | default("", True) %}{% set releases = context.history.released.values() | list %}{% set curr_release_index = releases.index(release) %}{# #}{% if mask_initial_release and curr_release_index == releases | length - 1 %}{# # On a first release, generate our special message #}{% include ".components/first_release.md.j2" %}{% else %}{# # Not the first release so generate notes normally #}{% include ".components/versioned_changes.md.j2" -%}{# # # If there are any commits that resolve issues, list out the issues with links #}{% set issue_resolving_commits = [] %}{% for commits in release["elements"].values() %}{% set _ = issue_resolving_commits.extend( commits | rejectattr("error", "defined") | selectattr("linked_issues") ) %}{% endfor %}{% if issue_resolving_commits | length > 0 %}{{ "\n### ✅ Resolved Issues\n" }}{# #}{% set issue_numbers = [] %}{% for linked_issues in issue_resolving_commits | map(attribute="linked_issues") %}{% set _ = issue_numbers.extend(linked_issues) %}{% endfor %}{% for issue_num in issue_numbers | unique | sort_numerically %}{{ "\n- %s: _Title_\n" | format(format_link(issue_num | issue_url, issue_num)) }}{# #}{% endfor %}{% endif %}{# #}{% set prev_release_index = curr_release_index + 1 %}{# #}{% if 'compare_url' is filter and prev_release_index < releases | length %}{% set prev_version_tag = releases[prev_release_index].version.as_tag() %}{% set 
new_version_tag = release.version.as_tag() %}{% set version_compare_url = prev_version_tag | compare_url(new_version_tag) %}{% set detailed_changes_link = '[{}...{}]({})'.format( prev_version_tag, new_version_tag, version_compare_url ) %}{{ "\n" }}{{ "---\n" }}{{ "\n" }}{{ "**Detailed Changes**: %s" | format(detailed_changes_link) }}{{ "\n" }}{% endif %}{% endif %}{# #} --- **Installable artifacts are available from**: {{ "- %s" | format( format_link( repo_name | create_pypi_url(release.version | string), "PyPi Registry", ) ) }} {{ "- %s" | format( format_link( release.version.as_tag() | create_release_url, "{vcs_name} Release Assets" | format_w_official_vcs_name, ) ) }} python-semantic-release-9.21.0/config/release-templates/CHANGELOG.rst.j2000066400000000000000000000013501475670435200255730ustar00rootroot00000000000000{# This changelog template controls which changelog creation occurs based on which mode is provided. Modes: - init: Initialize a full changelog from scratch - update: Insert new version details where the placeholder exists in the current changelog #}{% set this_file = "CHANGELOG.rst" %}{% set insertion_flag = ctx.changelog_insertion_flag %}{% set unreleased_commits = ctx.history.unreleased %}{% set releases = ctx.history.released.values() | list %}{# #}{% if ctx.changelog_mode == "init" %}{% include ".components/changelog_init.rst.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% set prev_changelog_file = this_file %}{% include ".components/changelog_update.rst.j2" %}{# #}{% endif %} python-semantic-release-9.21.0/docs/000077500000000000000000000000001475670435200173105ustar00rootroot00000000000000python-semantic-release-9.21.0/docs/Makefile000066400000000000000000000155101475670435200207520ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. 
SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make 
Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/python-semantic-release.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/python-semantic-release.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/python-semantic-release" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/python-semantic-release" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 
latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." 
doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage livehtml: sphinx-autobuild "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) python-semantic-release-9.21.0/docs/algorithm.rst000066400000000000000000000226701475670435200220370ustar00rootroot00000000000000.. _algorithm: Python Semantic Release's Version Bumping Algorithm =================================================== Below is a technical description of the algorithm which Python Semantic Release uses to calculate a new version for a project. .. _algorithm-assumptions: Assumptions ~~~~~~~~~~~ * At runtime, we are in a Git repository with HEAD referring to a commit on some branch of the repository (i.e. not in detached HEAD state). * We know in advance whether we want to produce a prerelease or not (based on the configuration and command-line flags). * We can parse the tags of the repository into semantic versions, as we are given the format that those Git tags should follow via configuration, but cannot cherry-pick only tags that apply to commits on specific branches. We must parse all tags in order to ensure we have parsed any that might apply to commits in this branch's history. * If we can identify a commit as a ``merge-base`` between our HEAD commit and one or more tags, then that merge-base should be unique. * We know ahead of time what ``prerelease_token`` to use for prereleases - e.g. ``rc``. 
* We know ahead of time whether ``major`` changes introduced by commits should cause the new version to remain on ``0.y.z`` if the project is already on a ``0.`` version - see :ref:`major_on_zero `. .. _algorithm-implementation: Implementation ~~~~~~~~~~~~~~ 1. Parse all the Git tags of the repository into semantic versions, and **sort** in descending (most recent first) order according to `semver precedence`_. Ignore any tags which do not correspond to valid semantic versions according to ``tag_format``. 2. Find the ``merge-base`` of HEAD and the latest tag according to the sort above. Call this commit ``M``. If there are no tags in the repo's history, we set ``M=HEAD``. 3. Find the latest non-prerelease version whose tag references a commit that is an ancestor of ``M``. We do this via a breadth-first search through the commit lineage, starting against ``M``, and for each tag checking if the tag corresponds to that commit. We break from the search when we find such a tag. If no such tag is found, see 4a). Else, suppose that tag corresponds to a commit ``L`` - goto 4b). 4. a. If no commit corresponding to the last non-prerelease version is found, the entire history of the repository is considered. We parse every commit that is an ancestor of HEAD to determine the type of change introduced - either ``major``, ``minor``, ``patch``, ``prerelease_revision`` or ``no_release``. We store this levels in a ``set`` as we only require the distinct types of change that were introduced. b. However, if we found a commit ``L`` which is the commit against which the last non-prerelease was tagged, then we parse only the commits from HEAD as far back as ``L``, to understand what changes have been introduced since the previous non-prerelease. We store these levels - either ``major``, ``minor``, ``patch``, ``prerelease_revision``, or ``no_release``, in a set, as we only require the distinct types of change that were introduced. c. 
We look for tags that correspond to each commit during this process, to identify the latest pre-release that was made within HEAD's ancestry. 5. If there have been no changes since the last non-prerelease, or all commits since that release result in a ``no_release`` type according to the commit parser, then we **terminate the algorithm.** 6. If we have not exited by this point, we know the following information: * The latest version, by `semver precedence`_, within the whole repository. Call this ``LV``. This might not be within the ancestry of HEAD. * The latest version, prerelease or non-prerelease, within the whole repository. Call this ``LVH``. This might not be within the ancestry of HEAD. This may be the same as ``LV``. * The latest non-prerelease version within the ancestry of HEAD. Call this ``LVHF``. This may be the same as ``LVH``. * The most significant type of change introduced by the commits since the previous full release. Call this ``level`` * Whether or not we wish to produce a prerelease from this version increment. Call this a boolean flag, ``prerelease``. (Assumption) * Whether or not to increment the major digit if a major change is introduced against an existing ``0.`` version. Call this ``major_on_zero``, a boolean flag. (Assumption) Using this information, the new version is decided according to the following criteria: a. If ``LV`` has a major digit of ``0``, ``major_on_zero`` is ``False`` and ``level`` is ``major``, reduce ``level`` to ``minor``. b. If ``prerelease=True``, then i. Diff ``LV`` with ``LVHF``, to understand if the ``major``, ``minor`` or ``patch`` digits have changed. For example, diffing ``1.2.1`` and ``1.2.0`` is a ``patch`` diff, while diffing ``2.1.1`` and ``1.17.2`` is a ``major`` diff. Call this ``DIFF`` ii. If ``DIFF`` is less semantically significant than ``level``, for example if ``DIFF=patch`` and ``level=minor``, then 1. 
Increment the digit of ``LVF`` corresponding to ``level``, for example the minor digit if ``level=minor``, setting all less significant digits to zero. 2. Add ``prerelease_token`` as a suffix result of 1., together with a prerelease revision number of ``1``. Return this new version and **terminate the algorithm.** Thus if ``DIFF=patch``, ``level=minor``, ``prerelease=True``, ``prerelease_token="rc"``, and ``LVF=1.1.1``, then the version returned by the algorithm is ``1.2.0-rc.1``. iii. If ``DIFF`` is semantically less significant than or equally significant to ``level``, then this means that the significance of change introduced by ``level`` is already reflected in a prerelease version that has been created since the last full release. For example, if ``LVHF=1.1.1``, ``LV=1.2.0-rc.1`` and ``level=minor``. In this case we: 1. If the prerelease token of ``LV`` is different from ``prerelease_token``, take the major, minor and patch digits of ``LV`` and construct a prerelease version using our given ``prerelease_token`` and a prerelease revision of ``1``. We then return this version and **terminate the algorithm.** For example, if ``LV=1.2.0-rc.1`` and ``prerelease_token=alpha``, we return ``1.2.0-alpha.1``. 2. If the prerelease token of ``LV`` is the same as ``prerelease_token``, we increment the revision number of ``LV``, return this version, and **terminate the algorithm.** For example, if ``LV=1.2.0-rc.1`` and ``prerelease_token=rc``, we return ``1.2.0-rc.2``. c. If ``prerelease=False``, then i. If ``LV`` is not a prerelease, then we increment the digit of ``LV`` corresponding to ``level``, for example the minor digit if ``level=minor``, setting all less significant digits to zero. We return the result of this and **terminate the algorithm**. ii. If ``LV`` is a prerelease, then: 1. Diff ``LV`` with ``LVHF``, to understand if the ``major``, ``minor`` or ``patch`` digits have changed. Call this ``DIFF`` 2. 
If ``DIFF`` is less semantically significant than ``level``, then i. Increment the digit of ``LV`` corresponding to ``level``, for example the minor digit if ``level=minor``, setting all less significant digits to zero. ii. Remove the prerelease token and revision number from the result of i., ("Finalize" the result of i.) return the result and **terminate the algorithm.** For example, if ``LV=1.2.2-alpha.1`` and ``level=minor``, we return ``1.3.0``. 3. If ``DIFF`` is semantically less significant than or equally significant to ``level``, then we finalize ``LV``, return the result and **terminate the algorithm**. .. _semver precedence: https://semver.org/#spec-item-11 .. _algorithm-complexity: Complexity ~~~~~~~~~~ **Space:** A list of parsed tags takes ``O(number of tags)`` in space. Parsing each commit during the breadth-first search between ``merge-base`` and the latest tag in the ancestry of HEAD takes at worst ``O(number of commits)`` in space to track visited commits. Therefore worst-case space complexity will be linear in the number of commits in the repo, unless the number of tags significantly exceeds the number of commits (in which case it will be linear in the number of tags). **Time:** Assuming using regular expression parsing of each tag is a constant-time operation, then the following steps contribute to the time complexity of the algorithm: * Parsing each tag - ``O(number of tags)`` * Sorting tags by `semver precedence`_ - ``O(number of tags * log(number of tags))`` * Finding the merge-base of HEAD and the latest release tag - ``O(number of commits)`` (worst case) * Parsing each commit and checking each tag against each commit - ``O(number of commits) + O(number of tags * number of commits)`` (worst case) Overall, assuming that the number of tags is less than or equal to the number of commits in the repository, this would lead to a worst-case time complexity that's quadratic in the number of commits in the repo. 
python-semantic-release-9.21.0/docs/automatic-releases/000077500000000000000000000000001475670435200230775ustar00rootroot00000000000000python-semantic-release-9.21.0/docs/automatic-releases/cronjobs.rst000066400000000000000000000024041475670435200254500ustar00rootroot00000000000000.. _cronjobs: Publish with cronjobs ~~~~~~~~~~~~~~~~~~~~~ This is for you if for some reason you cannot publish from your CI or you would like releases to drop at a certain interval. Before you start, answer this: Are you sure you do not want a CI to release for you? (high version numbers are not a bad thing). The guide below is for setting up scheduled publishing on a server. It requires that the user that runs the cronjob has push access to the repository and upload access to an artifact repository. 1. Create a virtualenv:: virtualenv semantic_release -p `which python3` 2. Install python-semantic-release:: pip install python-semantic-release 3. Clone the repositories you want to have scheduled publishing. 3. Put the following in ``publish``:: VENV=semantic_release/bin $VENV/pip install -U pip python-semantic-release > /dev/null publish() { cd $1 git stash -u # ensures that there is no untracked files in the directory git fetch && git reset --hard origin/master $VENV/semantic-release version && $VENV/semantic-release publish cd .. } publish publish 4. Add cronjob:: /bin/bash -c "cd && source semantic_release/bin/activate && ./publish 2>&1 >> releases.log" python-semantic-release-9.21.0/docs/automatic-releases/github-actions.rst000066400000000000000000000610421475670435200265540ustar00rootroot00000000000000.. _gh_actions: GitHub Actions ============== There are two official GitHub Actions for Python Semantic Release: 1. :ref:`python-semantic-release/python-semantic-release@TAG ` This is the main action that runs the :ref:`version ` CLI command. 
It is used to (1) determine the next version number, (2) stamp the version number, (3) run the build command, (4) build the changelog, (5) commit the changes, (6) tag the commit, (7) publish the commit & tag and lastly (8) create a GitHub release. For more information review the :ref:`version command documentation ` and see :ref:`below ` for the Action configuration options. 2. :ref:`python-semantic-release/publish-action@TAG ` This action is used to execute the :ref:`publish ` CLI command. It is used to upload files, such as distribution artifacts and other assets, to a GitHub release. .. note:: These GitHub Actions are only simplified wrappers around the python-semantic-release CLI. Ultimately, they download and install the published package from PyPI so if you find that you are trying to do something more advanced or less common, you may need to install and use the CLI directly. .. _gh_actions-psr: Python Semantic Release Action '''''''''''''''''''''''''''''' The official `Python Semantic Release GitHub Action`_ is a `GitHub Docker Action`_, which means at the beginning of the job it will build a Docker image that contains the Python Semantic Release package and its dependencies. It will then run the job step inside the Docker Container. This is done to ensure that the environment is consistent across all GitHub Runners regardless of platform. With this choice, comes some limitations of non-configurable options like a pre-defined python version, lack of installed build tools, and an inability to utilize caching. The primary benefit of using the GitHub Action is that it is easy to set up and use for most projects. We handle a lot of the git configuration under the hood, so you don't have to handle it yourself. There are a plenty of customization options available which are detailed individually below. Most importantly your project's configuration file will be used as normal, as your project will be mounted into the container for the action to use. .. 
_Python Semantic Release GitHub Action: https://github.com/marketplace/actions/python-semantic-release .. _GitHub Docker Action: https://docs.github.com/en/actions/sharing-automations/creating-actions/creating-a-docker-container-action .. seealso:: `action.yml`__: the code definition of the action __ https://github.com/python-semantic-release/python-semantic-release/blob/master/action.yml .. _gh_actions-psr-inputs: Inputs ------ GitHub Action inputs are used for select configuration and provide the necessary information to execute the action. The inputs are passed to the action using the ``with`` keyword in the workflow file. Many inputs will mirror the command line options available in the :ref:`version ` command. This section outlines each supported input and its purpose. ---- .. _gh_actions-psr-inputs-build: ``build`` """"""""" **Type:** ``Literal["true", "false"]`` Override whether the action should execute the build command or not. This option is equivalent to adding the command line switch ``--skip-build`` (when ``false``) to the :ref:`version ` command. If set to ``true``, no command line switch is passed and the default behavior of the :ref:`version ` is used. **Required:** ``false`` .. note:: If not set or set to ``true``, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-skip_build` option for the :ref:`version ` command. ---- .. _gh_actions-psr-inputs-build_metadata: ``build_metadata`` """""""""""""""""" **Type:** ``string`` Explicitly set the build metadata of the version. This is equivalent to running the command: .. code:: shell semantic-release version --build-metadata **Required:** ``false`` .. seealso:: - :ref:`cmd-version-option-build-metadata` option for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-changelog: ``changelog`` """"""""""""" **Type:** ``Literal["true", "false"]`` Override whether the action should generate a changelog or not. 
This option is equivalent to adding either ``--changelog`` (on ``true``) or ``--no-changelog`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-changelog` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-commit: ``commit`` """""""""" **Type:** ``Literal["true", "false"]`` Override whether the action should commit any changes to the local repository. Changes include the version stamps, changelog, and any other files that are modified and added to the index during the build command. This option is equivalent to adding either ``--commit`` (on ``true``) or ``--no-commit`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-commit` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-directory: ``directory`` """"""""""""" If the project is not at the root of the repository (like in monorepos), you can specify a sub-directory to change into before running semantic-release. **Required:** ``false`` **Default:** ``.`` ---- .. _gh_actions-psr-inputs-force: ``force`` """"""""" **Type:** ``Literal["prerelease", "patch", "minor", "major"]`` Force the next version to be a specific bump type. This is equivalent to running the command: .. code:: shell semantic-release version -- # Ex: force a patch level version bump semantic-release version --patch **Required:** ``false`` .. seealso:: - :ref:`cmd-version-option-force-level` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-git_committer_email: ``git_committer_email`` """"""""""""""""""""""" The email of the account used to commit. If customized, it must be associated with the provided token. 
**Required:** ``false`` ---- .. _gh_actions-psr-inputs-git_committer_name: ``git_committer_name`` """""""""""""""""""""" The name of the account used to commit. If customized, it must be associated with the provided token. **Required:** ``false`` ---- .. _gh_actions-psr-inputs-github_token: ``github_token`` """""""""""""""" The GitHub Token is essential for access to your GitHub repository to allow the push of commits & tags as well as to create a release. Not only do you need to provide the token as an input but you also need to ensure that the token has the correct permissions. The token should have the following `permissions`_: * id-token: write * contents: write **Required:** ``true`` .. _permissions: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idpermissions ---- .. _gh_actions-psr-inputs-prerelease: ``prerelease`` """""""""""""" Force the version to be a prerelease version when set to ``true``. This is equivalent to running the command: .. code:: shell semantic-release version --as-prerelease **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-as-prerelease` option for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-prerelease_token: ``prerelease_token`` """""""""""""""""""" Override any prerelease token in the configuration file with this value, if it is a pre-release. This will override the matching release branch configuration's ``prerelease_token`` value. If you always want it to be a prerelease then you must also set the :ref:`gh_actions-psr-inputs-prerelease` input to ``true``. This option is equivalent to running the command: .. code:: shell semantic-release version --prerelease-token **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. 
seealso:: - :ref:`cmd-version-option-prerelease-token` option for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-push: ``push`` """""""" **Type:** ``Literal["true", "false"]`` Override whether the action should push any commits or tags from the local repository to the remote repository. This option is equivalent to adding either ``--push`` (on ``true``) or ``--no-push`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-push` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-root_options: ``root_options`` """""""""""""""" Additional options for the main ``semantic-release`` command, which will come before the :ref:`version ` subcommand. **Example** .. code:: yaml - uses: python-semantic-release/python-semantic-release@v9.21.0 with: root_options: "-vv --noop" This configuration would cause the command to be ``semantic-release -vv --noop version``, which would run the version command verbosely but in no-operation mode. **Required:** ``false`` **Default:** ``-v`` .. seealso:: - :ref:`Options ` for the :ref:`semantic-release ` command ---- .. _gh_actions-psr-inputs-ssh_public_signing_key: ``ssh_public_signing_key`` """""""""""""""""""""""""" The public key associated with the private key used in signing a commit and tag. **Required:** ``false`` ---- .. _gh_actions-psr-inputs-ssh_private_signing_key: ``ssh_private_signing_key`` """"""""""""""""""""""""""" The private key used to sign a commit and tag. **Required:** ``false`` ---- .. _gh_actions-psr-inputs-tag: ``tag`` """"""" **Type:** ``Literal["true", "false"]`` Override whether the action should create a version tag in the local repository. This option is equivalent to adding either ``--tag`` (on ``true``) or ``--no-tag`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. 
note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-tag` options for the :ref:`version ` command ---- .. _gh_actions-psr-inputs-vcs_release: ``vcs_release`` """"""""""""""" **Type:** ``Literal["true", "false"]`` Override whether the action should create a release on the VCS. This option is equivalent to adding either ``--vcs-release`` (on ``true``) or ``--no-vcs-release`` (on ``false``) to the :ref:`version ` command. **Required:** ``false`` .. note:: If not set, the default behavior is defined by the :ref:`version ` command and any user :ref:`configurations `. .. seealso:: - :ref:`cmd-version-option-vcs-release` options for the :ref:`version ` command ---- .. _gh_actions-psr-outputs: Outputs ------- The Python Semantic Release Action also provides outputs that can be used in subsequent steps of the workflow. These outputs are used to provide information about the release and any actions that were taken. ---- .. _gh_actions-psr-outputs-is_prerelease: ``is_prerelease`` """"""""""""""""" **Type:** ``Literal["true", "false"]`` A boolean value indicating whether the released version is a prerelease. ---- .. _gh_actions-psr-outputs-released: ``released`` """""""""""" **Type:** ``Literal["true", "false"]`` A boolean value indicating whether a release was made. ---- .. _gh_actions-psr-outputs-version: ``version`` """"""""""" **Type:** ``string`` The newly released SemVer version string if one was made, otherwise the current version. Example: ``1.2.3`` ---- .. _gh_actions-psr-outputs-tag: ``tag`` """"""" **Type:** ``string`` The Git tag corresponding to the ``version`` output but in the tag format dictated by your configuration. Example: ``v1.2.3`` ---- .. 
_gh_actions-publish: Python Semantic Release Publish Action '''''''''''''''''''''''''''''''''''''' The official `Python Semantic Release Publish Action`_ is a `GitHub Docker Action`_, which means at the beginning of the job it will build a Docker image that contains the Python Semantic Release package and its dependencies. It will then run the job step inside the Docker Container. This is done to ensure that the environment is consistent across all GitHub Runners regardless of platform. With this choice, comes some limitations of non-configurable options like a pre-defined python version, lack of additional 3rd party tools, and an inability to utilize caching. The primary benefit of using the GitHub Action is that it is easy to set up and use for most projects. We handle some additional configuration under the hood, so you don't have to handle it yourself. We do however provide a few customization options which are detailed individually below. Most importantly your project's configuration file will be used as normal, as your project will be mounted into the container for the action to use. If you have issues with the action, please open an issue on the `python-semantic-release/publish-action`_ repository. .. _Python Semantic Release Publish Action: https://github.com/marketplace/actions/python-semantic-release-publish .. seealso:: - `action.yml`__: the code definition for the publish action __ https://github.com/python-semantic-release/publish-action/blob/main/action.yml .. _gh_actions-publish-inputs: Inputs ------ GitHub Action inputs are used for select configuration and provide the necessary information to execute the action. The inputs are passed to the action using the ``with`` keyword in the workflow file. Many inputs will mirror the command line options available in the :ref:`publish ` command and others will be specific to adjustment of the action environment. This section outlines each supported input and its purpose. ---- .. 
_gh_actions-publish-inputs-directory: ``directory`` """"""""""""" If the project is not at the root of the repository (like in monorepos), you can specify a sub-directory to change into before running semantic-release. **Required:** ``false`` **Default:** ``.`` ---- .. _gh_actions-publish-inputs-github_token: ``github_token`` """""""""""""""" The GitHub Token is essential for access to your GitHub repository to allow the publish of assets to a release. Not only do you need to provide the token as an input but you also need to ensure that the token has the correct permissions. The token should have the following `permissions`_: * ``contents: write``: Required for modifying a GitHub Release **Required:** ``true`` .. _permissions: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idpermissions ---- .. _gh_actions-publish-inputs-root_options: ``root_options`` """""""""""""""" Additional options for the main ``semantic-release`` command, which will come before the :ref:`publish ` subcommand. **Example** .. code:: yaml - uses: python-semantic-release/publish-action@v9.21.0 with: root_options: "-vv --noop" This configuration would cause the command to be ``semantic-release -vv --noop publish``, which would run the publish command verbosely but in no-operation mode. **Required:** ``false`` **Default:** ``-v`` .. seealso:: - :ref:`Options ` for the :ref:`semantic-release ` command ---- .. _gh_actions-publish-inputs-tag: ``tag`` """"""" **Type:** ``string`` The tag corresponding to the GitHub Release that the artifacts should be published to. This option is equivalent to running the command: .. code:: shell semantic-release publish --tag Python Semantic Release will automatically determine the latest release if no ``--tag`` option is provided. **Required:** ``false`` .. seealso:: - :ref:`cmd-publish-option-tag` option for the :ref:`publish ` command ---- .. 
_gh_actions-publish-outputs: Outputs ------- There are no outputs provided by the Python Semantic Release Publish Action at this time. .. note:: If you would like outputs to be provided by this action, please open an issue on the `python-semantic-release/publish-action`_ repository. .. _python-semantic-release/publish-action: https://github.com/python-semantic-release/publish-action/issues ---- .. _gh_actions-examples: Examples '''''''' Common Workflow Example ----------------------- The following is a simple common workflow example that uses both the Python Semantic Release Action and the Python Semantic Release Publish Action. This workflow will run on every push to the ``main`` branch and will create a new release upon a successful version determination. If a version is released, the workflow will then publish the package to PyPI and upload the package to the GitHub Release Assets as well. .. code:: yaml name: Continuous Delivery on: push: branches: - main jobs: release: runs-on: ubuntu-latest concurrency: group: ${{ github.workflow }}-release-${{ github.ref_name }} cancel-in-progress: false permissions: id-token: write contents: write steps: # Note: We checkout the repository at the branch that triggered the workflow # with the entire history to ensure to match PSR's release branch detection # and history evaluation. # However, we forcefully reset the branch to the workflow sha because it is # possible that the branch was updated while the workflow was running. This # prevents accidentally releasing un-evaluated changes. - name: Setup | Checkout Repository on Release Branch uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} fetch-depth: 0 - name: Setup | Force release branch to be at workflow sha run: | git reset --hard ${{ github.sha }} - name: Evaluate | Verify upstream has NOT changed # Last chance to abort before causing an error as another PR/push was applied to # the upstream branch while this workflow was running. 
This is important # because we are committing a version change (--commit). You may omit this step # if you have 'commit: false' in your configuration. # # You may consider moving this to a repo script and call it from this step instead # of writing it in-line. shell: bash run: | set +o pipefail UPSTREAM_BRANCH_NAME="$(git status -sb | head -n 1 | cut -d' ' -f2 | grep -E '\.{3}' | cut -d'.' -f4)" printf '%s\n' "Upstream branch name: $UPSTREAM_BRANCH_NAME" set -o pipefail if [ -z "$UPSTREAM_BRANCH_NAME" ]; then printf >&2 '%s\n' "::error::Unable to determine upstream branch name!" exit 1 fi git fetch "${UPSTREAM_BRANCH_NAME%%/*}" if ! UPSTREAM_SHA="$(git rev-parse "$UPSTREAM_BRANCH_NAME")"; then printf >&2 '%s\n' "::error::Unable to determine upstream branch sha!" exit 1 fi HEAD_SHA="$(git rev-parse HEAD)" if [ "$HEAD_SHA" != "$UPSTREAM_SHA" ]; then printf >&2 '%s\n' "[HEAD SHA] $HEAD_SHA != $UPSTREAM_SHA [UPSTREAM SHA]" printf >&2 '%s\n' "::error::Upstream has changed, aborting release..." exit 1 fi printf '%s\n' "Verified upstream branch has not changed, continuing with release..." - name: Action | Semantic Version Release id: release # Adjust tag with desired version if applicable. uses: python-semantic-release/python-semantic-release@v9.21.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} git_committer_name: "github-actions" git_committer_email: "actions@users.noreply.github.com" - name: Publish | Upload package to PyPI uses: pypa/gh-action-pypi-publish@v1 if: steps.release.outputs.released == 'true' - name: Publish | Upload to GitHub Release Assets uses: python-semantic-release/publish-action@v9.21.0 if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} tag: ${{ steps.release.outputs.tag }} .. important:: The `concurrency`_ directive is used on the job to prevent race conditions of more than one release job in the case if there are multiple pushes to ``main`` in a short period of time. 
Secondly the *Evaluate | Verify upstream has NOT changed* step is used to ensure that the upstream branch has not changed while the workflow was running. This is important because we are committing a version change (``commit: true``) and there might be a push collision that would cause undesired behavior. Review Issue `#1201`_ for more detailed information. .. warning:: You must set ``fetch-depth`` to 0 when using ``actions/checkout@v4``, since Python Semantic Release needs access to the full history to build a changelog and at least the latest tags to determine the next version. .. warning:: The ``GITHUB_TOKEN`` secret is automatically configured by GitHub, with the same permissions role as the user who triggered the workflow run. This causes a problem if your default branch is protected to specific users. You can work around this by storing an administrator's Personal Access Token as a separate secret and using that instead of ``GITHUB_TOKEN``. In this case, you will also need to pass the new token to ``actions/checkout`` (as the ``token`` input) in order to gain push access. .. _#1201: https://github.com/python-semantic-release/python-semantic-release/issues/1201 .. _concurrency: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idconcurrency Version Overrides Example ------------------------- In the case where you want to provide multiple command line options to the :ref:`version ` command, you provide them through the ``with`` directive in the workflow file. In this example, we want to force a patch version bump, not produce a changelog, and provide specialized build metadata. As a regular CLI command, this would look like: .. code:: shell semantic-release version --patch --no-changelog --build-metadata abc123 The equivalent GitHub Action configuration would be: .. code:: yaml # snippet - name: Action | Semantic Version Release # Adjust tag with desired version if applicable. 
uses: python-semantic-release/python-semantic-release@v9.21.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} force: patch changelog: false build_metadata: abc123 .. _gh_actions-monorepo: Actions with Monorepos '''''''''''''''''''''' While ``python-semantic-release`` does **NOT** have full monorepo support, if you have multiple projects stored within a single repository (or your project is not at the root of the repository), you can pass the :ref:`directory ` input to the action to change directory before semantic-release execution. For multiple packages, you would need to run the action multiple times, to release each project. The following example demonstrates how to release two projects in a monorepo. The ``directory`` input directive is also available for the Python Semantic Release Publish Action. .. code:: yaml - name: Release Project 1 uses: python-semantic-release/python-semantic-release@v9.21.0 with: directory: ./project1 github_token: ${{ secrets.GITHUB_TOKEN }} - name: Release Project 2 uses: python-semantic-release/python-semantic-release@v9.21.0 with: directory: ./project2 github_token: ${{ secrets.GITHUB_TOKEN }} python-semantic-release-9.21.0/docs/automatic-releases/index.rst000066400000000000000000000006501475670435200247410ustar00rootroot00000000000000.. _automatic: Automatic Releases ------------------ The key point with using this package is to automate your releases and stop worrying about version numbers. Different approaches to automatic releases and publishing with the help of this package can be found below. Using a CI is the recommended approach. .. _automatic-guides: Guides ^^^^^^ .. 
toctree:: :maxdepth: 2 travis github-actions cronjobs python-semantic-release-9.21.0/docs/automatic-releases/travis.rst000066400000000000000000000043341475670435200251450ustar00rootroot00000000000000Setting up python-semantic-release on Travis CI ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This guide expects you to have activated the repository on Travis CI. If this is not the case, please refer to `Travis documentation`_ on how to do that. 1. Add python-semantic-release settings ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ See :doc:`../configuration` for details on how to configure Python Semantic Release. Make sure that at least you have set :ref:`config-version_variables` before continuing. 2. Add environment variables ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ You will need to set up an environment variable in Travis. An easy way to do that is to go to the settings page for your package and add it there. Make sure that the secret toggle is set correctly. You need to set the :ref:`GH_TOKEN ` environment variable with a personal access token for Github. It will need either ``repo`` or ``public_repo`` scope depending on whether the repository is private or public. More information on how to set environment variables can be found on `Travis documentation on environment variables`_. 3. Add travis configuration ^^^^^^^^^^^^^^^^^^^^^^^^^^^ The following should be added to your ``.travis.yml`` file. .. code-block:: yaml after_success: - git config --global user.name "semantic-release (via TravisCI)" - git config --global user.email "semantic-release@travis" - pip install python-semantic-release - semantic-release version && semantic-release publish The first line tells Travis that we want to run the listed tasks after a successful build. The first two lines in after_success will configure git so that python-semantic-release will be able to commit on Travis. The third installs the latest version of python-semantic-release. 
The last will run the publish command, which will publish a new version if the changes indicate that one is due. 4. Push some changes ^^^^^^^^^^^^^^^^^^^^ You are now ready to release automatically on Travis CI on every change to your master branch. Happy coding! .. _Travis documentation: https://docs.travis-ci.com/ .. _Travis documentation on environment variables: https://docs.travis-ci.com/user/environment-variables/#Defining-Variables-in-Repository-Settings python-semantic-release-9.21.0/docs/changelog_templates.rst000066400000000000000000001324401475670435200240530ustar00rootroot00000000000000.. _changelog-templates: Version Change Reports ====================== When using the :ref:`cmd-version` and :ref:`cmd-changelog` commands, Python Semantic Release (PSR) will generate a changelog and release notes for your project automatically in the default configuration. The changelog is rendered using the `Jinja`_ template engine, and in the default configuration, PSR will use a built-in template file to render the changelog at the file location defined by the :ref:`changelog_file ` setting. Through the use of the templating engine & the :ref:`template_dir ` configuration setting, you can customize the appearance of your changelog and release notes content. You may also generate a set of files using your custom template directory and the templates will be rendered relative to the root of your repository. Because PSR uses a third-party library, `Jinja`_, as its template engine, we do not include all the syntax within our documentation but rather you should refer to the `Template Designer Documentation`_ for guidance on how to customize the appearance of your release files. If you would like to customize the template environment itself, then certain options are available to you via :ref:`changelog environment configuration `. 
If you do not want to use the changelog generation features, you can disable changelog generation entirely during the :ref:`cmd-version` command by providing the :ref:`--no-changelog ` command-line option. .. _Jinja: https://jinja.palletsprojects.com/en/3.1.x/ .. _Template Designer Documentation: https://jinja.palletsprojects.com/en/3.1.x/templates/ .. _changelog-templates-default_changelog: Using the Default Changelog --------------------------- If you don't provide any custom templates in the :ref:`changelog.template_dir `, the default changelog templates will be used to render the changelog. PSR provides two default changelog output formats: 1. Markdown (``.md``), *default* 2. reStructuredText (``.rst``), *available since v9.11.0* Both formats are kept in sync with one another to display the equivalent information in the respective format. The default changelog template is located in the ``data/templates/`` directory within the PSR package. The templates are written in modular style (ie. multiple files) and during the render process are ultimately combined together to render the final changelog output. The rendering start point is the ``CHANGELOG.{FORMAT_EXT}.j2`` underneath the respective format directory. PSR provides a few configuration options to customize the default changelog output and can be found under the :ref:`changelog.default_templates ` section as well as some common configuration options under the :ref:`config-changelog` section. To toggle the output format, you only need to set the :ref:`changelog.default_templates.changelog_file ` file name to include the desired file extension (``.md`` or ``.rst``). If you would like a different extension for the resulting changelog file, but would like to still have control over the template format, you can set the :ref:`changelog.default_templates.output_format ` configuration setting to the desired format. 
A common and *highly-recommended* configuration option is the :ref:`changelog.exclude_commit_patterns ` setting which allows the user to define regular expressions that will exclude commits from the changelog output. This is useful to filter out change messages that are not relevant to your external consumers (ex. ``ci`` and ``test`` in the conventional commit standard) and only include the important changes that impact the consumer of your software. Another important configuration option is the :ref:`changelog.mode ` setting which determines the behavior of the changelog generation. There are 2 modes available, which are described in detail below. 1. :ref:`changelog-templates-default_changelog-init` when ``mode = "init"``. 2. :ref:`changelog-templates-default_changelog-update` when ``mode = "update"``. .. _changelog-templates-default_changelog-init: Initialization Mode ^^^^^^^^^^^^^^^^^^^ When using the initialization mode, the changelog file will be created from scratch using the entire git history and **overwrite** any existing changelog file. This is the default behavior introduced in ``v8.0.0``. This is useful when you are trying to convert over to Python Semantic Release for the first time or when you want to automatically update the entire format of your changelog file. .. warning:: If you have an existing changelog in the location you have configured with the :ref:`changelog.changelog_file ` setting, PSR will overwrite the contents of this file on each release. Please make sure to refer to :ref:`changelog-templates-migrating-existing-changelog`. .. _changelog-templates-default_changelog-update: Update Mode ^^^^^^^^^^^^ .. note:: Introduced in ``v9.10.0``. When using the update mode, only the change information from the last release will be prepended into the existing changelog file (defined by the :ref:`changelog.changelog_file `). 
This mimics the behavior that was used in versions prior to ``v8.0.0`` before the conversion to a templating engine but now uses `Jinja`_ to accomplish the update. This mode is best suited for managing changes over the lifetime of your project when you may have a need to make manual changes or adjustments to the changelog and it's not easily recreated with a template. **How It Works** In order to insert the new release information into an existing changelog file, your changelog file must have an insertion flag to indicate where the new release information should be inserted. The default template will read in your existing changelog file, split the content based on the insertion flag, and then recombine the content (including the insertion flag) with the new release information added after the insertion flag. The insertion flag is customizable through the :ref:`changelog.insertion_flag ` setting. Generally, your insertion flag should be unique text to your changelog file to avoid any unexpected behavior. See the examples below. In the case where the insertion flag is **NOT** found in the existing changelog file, the changelog file will be re-written without any changes. If there is no existing changelog file found, then the changelog file will be initialized from scratch as if the mode was set to ``init``, except the :ref:`changelog.insertion_flag ` will be included into the newly created changelog file. .. tip:: We have accomplished changelog updating through the use of the `Jinja`_ templating and additional context filters and context variables. This is notable because in the case that you want to customize your changelog template, you now can use the same logic to enable changelog updates of your custom template! .. seealso:: - :ref:`changelog-templates-migrating-existing-changelog`. 
**Example** Given your existing changelog looks like the following with a :ref:`changelog.insertion_flag ` set to ````, when you run the :ref:`cmd-version` command, the new release information will be inserted after the insertion flag. **Before** .. code:: markdown # CHANGELOG ## 1.0.0 - Initial Release **After** .. code:: markdown # CHANGELOG ## v1.1.0 ### Feature - feat: added a new feature ### Fix - fix: resolved divide by zero error ## 1.0.0 - Initial Release .. _changelog-templates-default_changelog-examples: Configuration Examples ^^^^^^^^^^^^^^^^^^^^^^ 1. Goal: Configure an updating reStructuredText changelog with a custom insertion flag within ``pyproject.toml``. .. code:: toml [tool.semantic_release.changelog] mode = "update" insertion_flag = "..\n All versions below are listed in reverse chronological order" [tool.semantic_release.changelog.default_templates] changelog_file = "CHANGELOG.rst" output_format = "rst" # optional because of the file extension 2. Goal: Configure an updating Markdown changelog with custom file name and default insertion flag within a separate config file ``releaserc.json``. .. code:: json { "semantic_release": { "changelog": { "mode": "update", "default_templates": { "changelog_file": "docs/HISTORY", "output_format": "md" } } } } 3. Goal: Configure an initializing reStructuredText changelog with filtered conventional commits patterns and merge commits within a custom config file ``releaserc.toml``. .. code:: toml [semantic_release.changelog] mode = "init" default_templates = { changelog_file = "docs/CHANGELOG.rst" } exclude_commit_patterns = [ '''chore(?:\([^)]*?\))?: .+''', '''ci(?:\([^)]*?\))?: .+''', '''refactor(?:\([^)]*?\))?: .+''', '''style(?:\([^)]*?\))?: .+''', '''test(?:\([^)]*?\))?: .+''', '''build\((?!deps\): .+)''', '''Merged? .*''', ] If identified or supported by the parser, the default changelog templates will include a separate section of breaking changes and additional release information. 
Refer to the :ref:`commit parsing ` section to see how to write commit messages that will be properly parsed and displayed in these sections. .. _changelog-templates-default_release_notes: Using the Default Release Notes ------------------------------- PSR has the capability to generate release notes as part of the publishing of a new version similar to the changelog. The release notes are generated using a `Jinja`_ template and posted to your remote version control server (VCS) such as GitHub, GitLab, etc. during the :ref:`cmd-version` command. PSR provides a default built-in template out-of-the-box for generating release notes. The difference between the changelog and release notes is that the release notes only contain the changes for the current release. Due to the modularity of the PSR templates, the format is similar to an individual version of the default changelog but may include other version specific information. At this time, the default template for version release notes is only available in Markdown format for all VCS types. If you want to review what the default release notes look like you can use the following command to print the release notes to the console (remove any configuration for defining a custom template directory): .. code:: console # Create a current tag git tag v1.0.0 semantic-release --noop changelog --post-to-release-tag v1.0.0 The default template provided by PSR will respect the :ref:`config-changelog-default_templates-mask_initial_release` setting and will also add a comparison link to the previous release if one exists without customization. As of ``v9.18.0``, the default release notes will also include a statement to declare which license the project was released under. PSR determines which license to declare based on the value of ``project.license-expression`` in the ``pyproject.toml`` file as defined in the `PEP 639`_ specification. .. 
seealso:: - To personalize your release notes, see the :ref:`changelog-templates-custom_release_notes` section. .. _PEP 639: https://peps.python.org/pep-0639/ .. _changelog-templates-template-rendering: Custom Changelogs ----------------- If you would like to customize the appearance of your changelog, you can create your own custom templates and configure PSR to render your templates instead during the :ref:`cmd-version` and :ref:`cmd-changelog` commands. To use a custom template, you need to create a directory within your repository and set the :ref:`template_dir ` setting to the name of this directory. The default name is ``"templates"``. Templates are identified by giving a ``.j2`` extension to the template file. Any such templates have the ``.j2`` extension removed from the target file. Therefore, to render an output file ``foo.csv``, you should create a template called ``foo.csv.j2`` within your template directory. If you have additional files that you would like to render alongside your changelog, you can place these files within the template directory. A file within your template directory which does *not* end in ``.j2`` will not be treated as a template; it will be copied to its target location without being rendered by the template engine. .. tip:: Hidden files within the template directory (i.e. filenames that begin with a period ``"."``) are *excluded* from the rendering process. Hidden folders within the template directory are also excluded, *along with all files and folders contained within them*. This is useful for defining macros or other template components that should not be rendered individually. .. tip:: When initially starting out at customizing your own changelog templates, you should reference the default template embedded within PSR. The template directory is located at ``data/templates/`` within the PSR package. Within our templates directory we separate out each type of commit parser (e.g. angular) and the content format type (e.g. 
markdown). You can copy this directory to your repository's templates directory and then customize the templates to your liking. .. _changelog-templates-template-rendering-directory-structure: Directory Structure ^^^^^^^^^^^^^^^^^^^ When the templates are rendered, files within the templates directory tree are output to the location within your repository that has the *same relative path* to the root of your project as the *relative path of the template within the templates directory*. **Example** An example project has the following structure: .. code-block:: example-project/ ├── src/ │   └── example_project/ │   └── __init__.py └── ch-templates/ ├── CHANGELOG.md.j2 ├── .components/ │   └── authors.md.j2 ├── .macros.j2 ├── src/ │   └── example_project/ │   └── data/ │   └── data.json.j2 └── static/ └── config.cfg And a custom templates folder configured via the following snippet in ``pyproject.toml``: .. code-block:: toml [tool.semantic_release.changelog] template_dir = "ch-templates" After running a release with Python Semantic Release, the directory structure of the project will now look like this (excluding the template directory): .. code-block:: example-project/ ├── CHANGELOG.md ├── src/ │   └── example_project/ │   ├── data/ │   │   └── data.json │   └── __init__.py └── static/ └── config.cfg Importantly, note the following: * There is no top-level ``.macros`` file created, because hidden files are excluded from the rendering process. * There is no top-level ``.components`` directory created, because hidden folders and all files and folders contained within it are excluded from the rendering process. * The ``.components/authors.md.j2`` file is not rendered directly, however, it is used as a component to the ``CHANGELOG.md.j2`` via an ``include`` statement in the changelog template. * To render data files into the ``src/`` folder, the path to which the template should be rendered has to be created within the ``ch-templates`` directory. 
* The ``ch-templates/static`` folder is created at the top-level of the project, and the file ``ch-templates/static/config.cfg`` is *copied, not rendered* to the new top-level ``static`` folder. You may wish to leverage this behavior to modularize your changelog template, to define macros in a separate file, or to reference static data which you would like to avoid duplicating between your template environment and the remainder of your project. .. _changelog-templates-template-rendering-template-context: Changelog Template Context ^^^^^^^^^^^^^^^^^^^^^^^^^^ During the rendering of a directory tree, Python Semantic Release provides information about the history of the project available within the templating environment in order for it to be used to generate the changelog and other desired documents. Important project information is provided to the templating environment through the global variable ``context`` or ``ctx`` for short. Within the template environment, the ``context`` object has the following attributes: * ``changelog_insertion_flag (str)``: the insertion flag used to determine where the new release information should be inserted into the changelog file. This value is passed directly from :ref:`changelog.insertion_flag `. *Introduced in v9.10.0.* **Example Usage:** .. code:: jinja {% set changelog_parts = prev_changelog_contents.split( ctx.changelog_insertion_flag, maxsplit=1 ) %} * ``changelog_mode (Literal["init", "update"])``: the mode of the changelog generation currently being used. This can be used to determine different rendering logic. This value is passed directly from the :ref:`changelog.mode ` configuration setting. *Introduced in v9.10.0.* **Example Usage:** .. 
code:: jinja {% if ctx.changelog_mode == "init" %}{% include ".changelog_init.md.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% include ".changelog_update.md.j2" %}{# #}{% endif %} * ``history (ReleaseHistory)``: the :class:`ReleaseHistory ` instance for the project (See the :ref:`Release History ` section for more information). **Example Usage:** .. code:: jinja {% set unreleased_commits = ctx.history.unreleased | dictsort %}{% for release in context.history.released.values() %}{% include ".versioned_changes.md.j2" #}{% endfor %} * ``hvcs_type (str)``: the name of the VCS server type currently configured. This can be used to determine which filters are available or different rendering logic. *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {% if ctx.hvcs_type == "github" %}{{ "29" | pull_request_url }}{# #}{% elif ctx.hvcs_type == "gitlab" %}{{ "29" | merge_request_url }}{# #}{% endif %} * ``mask_initial_release (bool)``: a boolean value indicating whether the initial release should be masked with a generic message. This value is passed directly from the :ref:`changelog.default_templates.mask_initial_release ` configuration setting. *Introduced in v9.14.0.* **Example Usage:** .. code:: jinja #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # On a first release, generate a generic message #}{% include ".components/first_release.md.j2" %}{% else %}{# # Not the first release #}{% include ".components/versioned_changes.md.j2" %}{% endif %} * ``repo_name (str)``: the name of the current repository parsed from the Git url. **Example Usage:** .. code:: jinja {{ ctx.repo_name }} .. code:: markdown example_repo * ``repo_owner (str)``: the owner of the current repository parsed from the Git url. **Example Usage:** .. code:: jinja {{ ctx.repo_owner }} .. code:: markdown example_org * ``prev_changelog_file (str)``: the path to the previous changelog file that should be updated with the new release information. 
This value is passed directly from :ref:`changelog.changelog_file `. *Introduced in v9.10.0.* **Example Usage:** .. code:: jinja {% set prev_changelog_contents = prev_changelog_file | read_file | safe %} .. _changelog-templates-template-rendering-template-context-release-history: Release History """"""""""""""" A :py:class:`ReleaseHistory ` object has two attributes: ``released`` and ``unreleased``. The ``unreleased`` attribute is of type ``Dict[str, List[ParseResult]]``. Each commit in the current branch's commit history since the last release on this branch is grouped by the ``type`` attribute of the :py:class:`ParsedCommit ` returned by the commit parser, or if the parser returned a :py:class:`ParseError ` then the result is grouped under the ``"unknown"`` key. For this reason, every element of ``ReleaseHistory.unreleased["unknown"]`` is a :py:class:`ParseError `, and every element of every other value in ``ReleaseHistory.unreleased`` is of type :py:class:`ParsedCommit `. Typically, commit types will be ``"feature"``, ``"fix"``, ``"breaking"``, though the specific types are determined by the parser. For example, the :py:class:`EmojiCommitParser ` uses a textual representation of the emoji corresponding to the most significant change introduced in a commit (e.g. ``":boom:"``) as the different commit types. As a template author, you are free to customize how these are presented in the rendered template. .. note:: If you are using a custom commit parser following the guide at :ref:`commit_parser-custom_parser`, your custom implementations of :py:class:`ParseResult `, :py:class:`ParseError ` and :py:class:`ParsedCommit ` will be used in place of the built-in types. The ``released`` attribute is of type ``Dict[Version, Release]``. The keys of this dictionary correspond to each version released within this branch's history, and are of type :py:class:`Version `. 
You can use the ``as_tag()`` method to render these as the Git tag that they correspond to inside your template. A :py:class:`Release ` object has an ``elements`` attribute, which has the same structure as the ``unreleased`` attribute of a :py:class:`ReleaseHistory `; that is, ``elements`` is of type ``Dict[str, List[ParseResult]]``, where every element of ``elements["unknown"]`` is a :py:class:`ParseError `, and elements of every other value correspond to the ``type`` attribute of the :py:class:`ParsedCommit ` returned by the commit parser. The commits represented within each ``ReleaseHistory.released[version].elements`` grouping are the commits which were made between version and the release corresponding to the previous version. That is, given two releases ``Version(1, 0, 0)`` and ``Version(1, 1, 0)``, ``ReleaseHistory.released[Version(1, 1, 0)].elements`` contains only commits made after the release of ``Version(1, 0, 0)`` up to and including the release of ``Version(1, 1, 0)``. To maintain a consistent order of subsections in the changelog headed by the commit type, it's recommended to use Jinja's `dictsort `_ filter. Each :py:class:`Release ` object also has the following attributes: * ``tagger: git.Actor``: The tagger who tagged the release. * ``committer: git.Actor``: The committer who made the release commit. * ``tagged_date: datetime``: The date and time at which the release was tagged. .. seealso:: * :ref:`commit_parser-builtin` * :ref:`Commit Parser Tokens ` * `git.Actor `_ * `datetime.strftime Format Codes `_ .. _changelog-templates-custom_templates-filters: Changelog Template Filters ^^^^^^^^^^^^^^^^^^^^^^^^^^ In addition to the context variables, PSR seeds the template environment with a set of custom functions (commonly called ``filters`` in `Jinja`_ terminology) for use within the template. A filter's first argument is always piped (``|``) to the function while any additional arguments are passed in parentheses like normal function calls. 
The filters provided vary based on the VCS configured and available features: * ``autofit_text_width (Callable[[textStr, maxWidthInt, indent_sizeInt], textStr])``: given a text string, fit the text to the maximum width provided. This filter is useful when you want to wrap text to a specific width. The filter will attempt to break the text at word boundaries and will indent the text by the amount specified in the ``indent_size`` parameter. *Introduced in v9.12.0.* **Example Usage:** .. code:: jinja {{ "This is a long string that needs to be wrapped to a specific width" | autofit_text_width(40, 4) }} **Markdown Output:** .. code:: markdown This is a long string that needs to be wrapped to a specific width * ``convert_md_to_rst (Callable[[MdStr], RstStr])``: given a markdown string, convert it to reStructuredText format. This filter is useful when building a reStructuredText changelog but your commit messages are in markdown format. It is utilized by the default RST changelog template. It is limited in its ability to convert all markdown to reStructuredText, but it handles most common cases (bold, italics, inline-raw, etc.) within commit messages. *Introduced in v9.11.0.* **Example Usage:** .. code:: jinja {{ "\n* %s (`%s`_)\n" | format( commit.message.rstrip() | convert_md_to_rst, commit.short_hash, ) }} * ``create_pypi_url(package_name: str, version: str = "")``: given a package name and an optional version, return a URL to the PyPI page for the package. If a version is provided, the URL will point to the specific version page. If no version is provided, the URL will point to the package page. *Introduced in v9.18.0.* **Example Usage:** .. code:: jinja {{ "example-package" | create_pypi_url }} {{ "example-package" | create_pypi_url("1.0.0") }} **Markdown Output:** .. 
code:: markdown https://pypi.org/project/example-package https://pypi.org/project/example-package/1.0.0 * ``create_release_url (Callable[[TagStr], UrlStr])``: given a tag, return a URL to the release page on the remote vcs. This filter is useful when you want to link to the release page on the remote vcs. *Introduced in v9.18.0.* **Example Usage:** .. code:: jinja {{ "v1.0.0" | create_release_url }} **Markdown Output:** .. code:: markdown https://example.com/example/repo/releases/tag/v1.0.0 * ``create_server_url (Callable[[PathStr, AuthStr | None, QueryStr | None, FragmentStr | None], UrlStr])``: when given a path, prepend the configured vcs server host and url scheme. Optionally you can provide, a auth string, a query string or a url fragment to be normalized into the resulting url. Parameter order is as described above respectively. *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {{ "example/repo.git" | create_server_url }} {{ "example/repo" | create_server_url(None, "results=1", "section-header") }} **Markdown Output:** .. code:: markdown https://example.com/example/repo.git https://example.com/example/repo?results=1#section-header * ``create_repo_url (Callable[[RepoPathStr, QueryStr | None, FragmentStr | None], UrlStr])``: when given a repository path, prepend the configured vcs server host, and repo namespace. Optionally you can provide, an additional query string and/or a url fragment to also put in the url. Parameter order is as described above respectively. This is similar to ``create_server_url`` but includes the repo namespace and owner automatically. *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {{ "releases/tags/v1.0.0" | create_repo_url }} {{ "issues" | create_repo_url("q=is%3Aissue+is%3Aclosed") }} **Markdown Output:** .. 
code:: markdown https://example.com/example/repo/releases/tags/v1.0.0 https://example.com/example/repo/issues?q=is%3Aissue+is%3Aclosed * ``commit_hash_url (Callable[[hashStr], UrlStr])``: given a commit hash, return a URL to the commit in the remote. *Introduced in v8.0.0.* **Example Usage:** .. code:: jinja {{ commit.hexsha | commit_hash_url }} **Markdown Output:** .. code:: markdown https://example.com/example/repo/commit/a1b2c3d435657f5d339ba10c7b1ed81b460af51d * ``compare_url (Callable[[StartRefStr, StopRefStr], UrlStr])``: given a starting git reference and a ending git reference create a comparison url between the two references that can be opened on the remote *Introduced in v9.6.0.* **Example Usage:** .. code:: jinja {{ "v1.0.0" | compare_url("v1.1.0") }} **Markdown Output:** .. code:: markdown https://example.com/example/repo/compare/v1.0.0...v1.1.0 * ``issue_url (Callable[[IssueNumStr | IssueNumInt], UrlStr])``: given an issue number, return a URL to the issue on the remote vcs. In v9.12.2, this filter was updated to handle a string that has leading prefix symbols (ex. ``#32``) and will strip the prefix before generating the URL. *Introduced in v9.6.0, Modified in v9.12.2.* **Example Usage:** .. code:: jinja {# Add Links to issues annotated in the commit message # NOTE: commit.linked_issues is only available in v9.15.0 or greater # #}{% for issue_ref in commit.linked_issues %}{{ "- [%s](%s)" | format(issue_ref, issue_ref | issue_url) }}{% endfor %} **Markdown Output:** .. code:: markdown - [#32](https://example.com/example/repo/issues/32) * ``merge_request_url (Callable[[MergeReqStr | MergeReqInt], UrlStr])``: given a merge request number, return a URL to the merge request in the remote. This is an alias to the ``pull_request_url`` but only available for the VCS that uses the merge request terminology. In v9.12.2, this filter was updated to handle a string that has leading prefix symbols (ex. ``#29``) and will strip the prefix before generating the URL. 
*Introduced in v9.6.0, Modified in v9.12.2.* **Example Usage:** .. code:: jinja {{ "[%s](%s)" | format( commit.linked_merge_request, commit.linked_merge_request | merge_request_url ) }} {# commit.linked_merge_request is only available in v9.13.0 or greater #} **Markdown Output:** .. code:: markdown [#29](https://example.com/example/repo/-/merge_requests/29) * ``pull_request_url (Callable[[PullReqStr | PullReqInt], UrlStr])``: given a pull request number, return a URL to the pull request in the remote. For remote vcs' that use merge request terminology, this filter is an alias to the ``merge_request_url`` filter function. In v9.12.2, this filter was updated to handle a string that has leading prefix symbols (ex. ``#29``) and will strip the prefix before generating the URL. *Introduced in v9.6.0, Modified in v9.12.2.* **Example Usage:** .. code:: jinja {# Create a link to the merge request associated with the commit # NOTE: commit.linked_merge_request is only available in v9.13.0 or greater #}{{ "[%s](%s)" | format( commit.linked_merge_request, commit.linked_merge_request | pull_request_url ) }} **Markdown Output:** .. code:: markdown [#29](https://example.com/example/repo/pull/29) * ``format_w_official_vcs_name (Callable[[str], str])``: given a format string, insert the official VCS type name into the string and return. This filter is useful when you want to display the proper name of the VCS type in a changelog or release notes. The filter supports three different replace formats: ``%s``, ``{}``, and ``{vcs_name}``. *Introduced in v9.18.0.* **Example Usage:** .. code:: jinja {{ "%s Releases" | format_w_official_vcs_name }} {{ "{} Releases" | format_w_official_vcs_name }} {{ "{vcs_name} Releases" | format_w_official_vcs_name }} **Markdown Output:** .. code:: markdown GitHub Releases GitHub Releases GitHub Releases * ``read_file (Callable[[str], str])``: given a file path, read the file and return the contents as a string. 
This function was added specifically to enable the changelog update feature where it would load the existing changelog file into the templating environment to be updated. *Introduced in v9.10.0.* **Example Usage:** .. code:: jinja {% set prev_changelog_contents = prev_changelog_file | read_file | safe %} * ``sort_numerically (Callable[[Iterable[str], bool], list[str]])``: given a sequence of strings with possibly some non-number characters as a prefix or suffix, sort the strings as if they were just numbers from lowest to highest. This filter is useful when you want to sort issue numbers or other strings that have a numeric component in them but cannot be cast to a number directly to sort them. If you want to sort the strings in reverse order, you can pass a boolean value of ``True`` as the second argument. *Introduced in v9.16.0.* **Example Usage:** .. code:: jinja {{ ["#222", "#1023", "#444"] | sort_numerically }} {{ ["#222", "#1023", "#444"] | sort_numerically(True) }} **Markdown Output:** .. code:: markdown ['#222', '#444', '#1023'] ['#1023', '#444', '#222'] Availability of the documented filters can be found in the table below: ========================== ========= ===== ====== ====== **filter - hvcs_type** bitbucket gitea github gitlab ========================== ========= ===== ====== ====== autofit_text_width ✅ ✅ ✅ ✅ convert_md_to_rst ✅ ✅ ✅ ✅ create_pypi_url ✅ ✅ ✅ ✅ create_server_url ✅ ✅ ✅ ✅ create_release_url ❌ ✅ ✅ ✅ create_repo_url ✅ ✅ ✅ ✅ commit_hash_url ✅ ✅ ✅ ✅ compare_url ✅ ❌ ✅ ✅ format_w_official_vcs_name ✅ ✅ ✅ ✅ issue_url ❌ ✅ ✅ ✅ merge_request_url ❌ ❌ ❌ ✅ pull_request_url ✅ ✅ ✅ ✅ read_file ✅ ✅ ✅ ✅ sort_numerically ✅ ✅ ✅ ✅ ========================== ========= ===== ====== ====== .. seealso:: * `Filters `_ .. _changelog-templates-template-rendering-example: Example ^^^^^^^ The following template is a simple example of how to render a changelog using the PSR template context to create a changelog in Markdown format. 
**Configuration:** ``pyproject.toml`` .. code:: toml [tool.semantic_release.changelog] template_dir = "templates" **Template:** ``templates/CHANGELOG.md.j2`` .. code:: jinja # CHANGELOG {% for version, release in ctx.history.released.items() %}{{ "## %s (%s)" | format(version.as_tag(), release.tagged_date.strftime("%Y-%m-%d")) }}{% for type_, commits in release["elements"] if type_ != "unknown" | dictsort %}{{ "### %s" | format(type_ | title) }}{% for commit in commits %}{{ "* %s ([`%s`](%s))" | format( commit.descriptions[0] | capitalize, commit.hexsha[:7], commit.hexsha | commit_hash_url, ) }}{% endfor %}{% endfor %}{% endfor %} **Result:** ``CHANGELOG.md`` .. code:: markdown # CHANGELOG ## v1.1.0 (2022-01-01) ### Feature * Added a new feature ([`a1b2c3d`](https://github.com/example/repo/commit/a1b2c3d)) ## v1.0.0 (2021-12-31) ### Fix * Resolved divide by zero error ([`e4f5g6h`](https://github.com/example/repo/commit/e4f5g6h)) It is important to note that the template utilizes the ``context`` variable to extract the project history as well as the ``commit_hash_url`` filter to generate a URL to the remote VCS for each commit. Both of these are injected into the template environment by PSR. .. _changelog-templates-custom_release_notes: Custom Release Notes -------------------- If you would like to customize the appearance of your release notes, you can add a hidden file named ``.release_notes.md.j2`` at the root of your :ref:`changelog.template_dir `. This file will automatically be detected and used to render the release notes during the :ref:`cmd-version` and :ref:`cmd-changelog` commands. A similar :ref:`template rendering ` mechanism is used to render the release notes as is used for the changelog. There are minor differences in the context available to the release notes template but the template directory structure and modularity is maintained. .. 
tip:: When initially starting out at customizing your own release notes template, you should reference the default template embedded within PSR. The release notes template can be found in the directory ``data/templates//md`` within the PSR package. .. _changelog-templates-custom_release_notes-context: Release Notes Context ^^^^^^^^^^^^^^^^^^^^^ All of the changelog's :ref:`template context ` is exposed to the `Jinja`_ template when rendering the release notes. Additionally, the following two globals are available to the template: * ``release`` (:py:class:`Release `): contains metadata about the content of the release, as parsed from commit logs *Introduced in v8.0.0.* * ``version`` (:py:class:`Version `): contains metadata about the software version to be released and its ``git`` tag *Introduced in v8.0.0.* .. _changelog-templates-release-notes-template-example: Example ^^^^^^^ Below is an example template that can be used to render release notes (it's similar to GitHub's `automatically generated release notes`_): .. _Automatically generated release notes: https://docs.github.com/en/repositories/releasing-projects-on-github/automatically-generated-release-notes **Configuration:** ``pyproject.toml`` .. code:: toml [tool.semantic_release.changelog] template_dir = "templates" **Template:** ``templates/.release_notes.md.j2`` .. code:: jinja ## What's Changed {% for type_, commits in release["elements"] | dictsort %}{%- if type_ != "unknown" %}{{ "### %s" | format(type_ | title) }}{% for commit in commits %}{{ "* %s by %s in [`%s`](%s)" | format( commit.descriptions[0] | capitalize, commit.commit.author.name, commit.hexsha[:7], commit.hexsha | commit_hash_url, ) }}{%- endfor %}{% endif %}{% endfor %} **Result:** ``https://github.com/example/repo/releases/tag/v1.1.0`` .. code:: markdown ## What's Changed ### Feature * Added a new feature by John Doe in [`a1b2c3d`](https://github.com/example/repo/commit/a1b2c3d) .. 
_changelog-templates-migrating-existing-changelog: Migrating an Existing Changelog ------------------------------- **v9.10.0 or greater** Migrating an existing changelog is simple with Python Semantic Release! To preserve your existing changelog, follow these steps: 1. **Set the changelog.mode to "update"** in your configuration file. This will ensure that only the new release information is added to your existing changelog file. 2. **Set the changelog.insertion_flag to a unique string.** You may use the default value or set it to a unique string that is not present in your existing changelog file. This flag is used to determine where the new release information should be inserted into your existing changelog. 3. **Add the insertion flag to your changelog file.** This must match the value you set in step 2. The insertion flag should be placed in the location above where you would like the new release information to be inserted. .. note:: If you are trying to convert an existing changelog to a new format, you will need to do most of the conversion manually (or rebuild via init and modify) and make sure to include your insertion flag into the format of the new changelog. **Prior to v9.10.0** If you have an existing changelog that you would like to preserve, you will need to add the contents of the changelog file to your changelog template - either directly or via Jinja's `include `_ tag. If you would like only the history from your next release onwards to be rendered into the changelog in addition to the existing changelog, you can add an `if statement `_ based upon the versions in the keys of ``context.released``. .. _changelog-templates-upgrading-templates: Upgrading Templates ------------------- As PSR evolves, new features and improvements are added to the templating engine. If you have created your own custom templates, you may need to update them to take advantage of some new features. 
Below are some instructions on how to upgrade your templates to gain the benefits of the new features. .. _changelog-templates-upgrading-updating_changelog: Incrementally Updating Changelog Template ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. note:: This section is only relevant if you are upgrading from a version of PSR greater than v8.0.0 and prior to ``v9.10.0`` and have created your own custom templates. If you have previously created your own custom templates and would like to gain the benefits of the new updating changelog feature, you will need to make a few changes to your existing templates. The following steps are a few suggestions to help upgrade your templates but primarily you should review the embedded default templates in the PSR package for a full example. You can find the default templates at `data/templates/`__ directory. __ https://github.com/python-semantic-release/python-semantic-release/tree/master/src/semantic_release/data/templates 1. **Add a conditional to check the changelog_mode.** This will allow you to determine if you should render the entire changelog or just the new release information. See ``data/templates/*/md/CHANGELOG.md.j2`` for reference. 2. **Use the new read_file filter** to read in the existing changelog file ``ctx.prev_changelog_file``. This will allow you to include the existing changelog content in your new changelog file. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 3. **Split the changelog content based on the insertion flag.** This will allow you to insert the new release information after the insertion flag (``ctx.changelog_insertion_flag``). See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 4. **Print the leading content before the insertion flag.** This ensures you maintain any content that should be included before the new release information. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 5. 
**Print your insertion flag.** This is imperative to ensure that the resulting changelog can be updated in the future. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 6. **Print the new release information.** Be sure to consider both unreleased and released commits during this step because of the :ref:`cmd-changelog` command that can be run at any time. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. 7. **Print the trailing content after the insertion flag.** This ensures you maintain any content that should be included after the new release information. See ``data/templates/*/md/.components/changelog_update.md.j2`` for reference. .. tip:: Modularity of your templates is key to handling both modes of changelog generation. Reference the default templates for examples on how we handle both modes and defensively handle numerous breaking scenarios. .. tip:: If you are having trouble upgrading your templates, please post a question on the `PSR GitHub`__ __ https://github.com/python-semantic-release/python-semantic-release/issues python-semantic-release-9.21.0/docs/commands.rst000066400000000000000000000377011475670435200216530ustar00rootroot00000000000000.. _commands: Commands ======== All commands accept a ``-h/--help`` option, which displays the help text for the command and exits immediately. ``semantic-release`` does not allow interspersed arguments and options, which means that the options for ``semantic-release`` are not necessarily accepted one of the subcommands. In particular, the :ref:`cmd-main-option-noop` and :ref:`cmd-main-option-verbosity` flags must be given to the top-level ``semantic-release`` command, before the name of the subcommand. 
For example: Incorrect:: semantic-release version --print --noop -vv Correct:: semantic-release -vv --noop version --print With the exception of :ref:`cmd-main` and :ref:`cmd-generate-config`, all commands require that you have set up your project's configuration. To help with this step, :ref:`cmd-generate-config` can create the default configuration for you, which will allow you to tweak it to your needs rather than write it from scratch. .. _cmd-main: ``semantic-release`` ~~~~~~~~~~~~~~~~~~~~ .. _cmd-main-options: Options: -------- .. _cmd-main-option-version: ``--version`` ************** Display the version of Python Semantic Release and exit .. _cmd-main-option-noop: ``--noop`` ********** Use this flag to see what ``semantic-release`` intends to do without making changes to your project. When using this option, ``semantic-release`` can be run as many times as you wish without any side-effects. .. _cmd-main-option-verbosity: ``-v/--verbose`` ****************** Can be supplied more than once. Controls the verbosity of ``semantic-releases`` logging output (default level is ``WARNING``, use ``-v`` for ``INFO`` and ``-vv`` for ``DEBUG``). .. _cmd-main-option-config: ``-c/--config [FILE]`` ********************** Specify the configuration file which Python Semantic Release should use. This can be any of the supported formats valid for :ref:`cmd-generate-config-option-format` **Default:** pyproject.toml .. seealso:: - :ref:`configuration` .. _cmd-main-option-strict: ``--strict`` ************ Enable Strict Mode. This will cause a number of conditions to produce a non-zero exit code when passed, where they would otherwise have produced an exit code of 0. Enabling this allows, for example, certain conditions to cause failure of a CI pipeline, while omitting this flag would allow the pipeline to continue to run. .. seealso:: - :ref:`strict-mode` .. 
_cmd-version: ``semantic-release version`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Detect the semantically correct next version that should be applied to your project. By default: * Write this new version to the project metadata locations specified in the configuration file * Build the project using :ref:`config-build_command`, if specified * Create a new commit with these locations and any other assets configured to be included in a release * Tag this commit according the configured format, with a tag that uniquely identifies the version being released * Push the new tag and commit to the remote for the repository * Create a release (if supported) in the remote VCS for this tag Changelog generation is done identically to the way it is done in :ref:`cmd-changelog`, but this command additionally ensures the updated changelog is included in the release commit that is made. .. seealso:: - :ref:`cmd-changelog` - :ref:`changelog-templates` - :ref:`config-tag_format` - :ref:`config-assets` - :ref:`config-version_toml` - :ref:`config-version_variables` .. _cmd-version-options: Options: -------- .. _cmd-version-option-print: ``--print`` *********** Print the next version that will be applied, respecting the other command line options that are supplied, and exit. This flag is useful if you just want to see what the next version will be. Note that instead of printing nothing at all, if no release will be made, the current version is printed. For example, you can experiment with which versions would be applied using the other command line options:: semantic-release version --print semantic-release version --patch --print semantic-release version --prerelease --print .. _cmd-version-option-print-tag: ``--print-tag`` *************** Same as the :ref:`cmd-version-option-print` flag but prints the complete tag name (ex. ``v1.0.0`` or ``py-v1.0.0``) instead of the raw version number (``1.0.0``). .. 
_cmd-version-option-print-last-released: ``--print-last-released`` ************************* Print the last released version based on the Git tags. This flag is useful if you just want to see the released version without determining what the next version will be. Note if the version can not be found nothing will be printed. .. _cmd-version-option-print-last-released-tag: ``--print-last-released-tag`` ***************************** Same as the :ref:`cmd-version-option-print-last-released` flag but prints the complete tag name (ex. ``v1.0.0`` or ``py-v1.0.0``) instead of the raw version number (``1.0.0``). .. _cmd-version-option-force-level: ``--major/--minor/--patch/--prerelease`` **************************************** Force the next version to increment the major, minor or patch digits, or the prerelease revision, respectively. These flags are optional but mutually exclusive, so only one may be supplied, or none at all. Using these flags overrides the usual calculation for the next version; this can be useful, say, when a project wants to release its initial 1.0.0 version. .. warning:: Using these flags will override the configured value of ``prerelease`` (configured in your :ref:`Release Group`), **regardless of your configuration or the current version**. To produce a prerelease with the appropriate digit incremented you should also supply the :ref:`cmd-version-option-as-prerelease` flag. If you do not, using these flags will force a full (non-prerelease) version to be created. For example, suppose your project's current version is ``0.2.1-rc.1``. The following shows how these options can be combined with ``--as-prerelease`` to force different versions: .. 
code-block:: bash semantic-release version --prerelease --print # 0.2.1-rc.2 semantic-release version --patch --print # 0.2.2 semantic-release version --minor --print # 0.3.0 semantic-release version --major --print # 1.0.0 semantic-release version --minor --as-prerelease --print # 0.3.0-rc.1 semantic-release version --prerelease --as-prerelease --print # 0.2.1-rc.2 These options are forceful overrides, but there is no action required for subsequent releases performed using the usual calculation algorithm. Supplying ``--prerelease`` will cause Python Semantic Release to scan your project history for any previous prereleases with the same major, minor and patch versions as the latest version and the same :ref:`prerelease token` as the one passed by command-line or configuration. If one is not found, ``--prerelease`` will produce the next version according to the following format: .. code-block:: python f"{latest_version.major}.{latest_version.minor}.{latest_version.patch}-{prerelease_token}.1" However, if Python Semantic Release identifies a previous *prerelease* version with the same major, minor and patch digits as the latest version, *and* the same prerelease token as the one supplied by command-line or configuration, then Python Semantic Release will increment the revision found on that previous prerelease version in its new version. For example, if ``"0.2.1-rc.1"`` and already exists as a previous version, and the latest version is ``"0.2.1"``, invoking the following command will produce ``"0.2.1-rc.2"``: .. code-block:: bash semantic-release version --prerelease --prerelease-token "rc" --print .. warning:: This is true irrespective of the branch from which ``"0.2.1-rc.1"`` was released from. The check for previous prereleases "leading up to" this normal version is intended to help prevent collisions in git tags to an extent, but isn't foolproof. 
As the example shows it is possible to release a prerelease for a normal version that's already been released when using this flag, which would in turn be ignored by tools selecting versions by `SemVer precedence rules`_. .. _SemVer precedence rules: https://semver.org/#spec-item-11 .. seealso:: - :ref:`configuration` - :ref:`config-branches` .. _cmd-version-option-as-prerelease: ``--as-prerelease`` ******************* After performing the normal calculation of the next version, convert the resulting next version to a prerelease before applying it. As with :ref:`cmd-version-option-force-level`, this option is a forceful override, but no action is required to resume calculating versions as normal on the subsequent releases. The main distinction between ``--prerelease`` and ``--as-prerelease`` is that the latter will not *force* a new version if one would not have been released without supplying the flag. This can be useful when making a single prerelease on a branch that would typically release normal versions. If not specified in :ref:`cmd-version-option-prerelease-token`, the prerelease token is identified using the :ref:`Multibranch Release Configuration ` See the examples alongside :ref:`cmd-version-option-force-level` for how to use this flag. .. _cmd-version-option-prerelease-token: ``--prerelease-token [VALUE]`` ****************************** Force the next version to use the value as the prerelease token. This overrides the configured value if one is present. If not used during a release producing a prerelease version, this option has no effect. .. _cmd-version-option-build-metadata: ``--build-metadata [VALUE]`` **************************** If given, append the value to the newly calculated version. This can be used, for example, to attach a run number from a CI service or a date to the version and tag that are created. 
This value can also be set using the environment variable ``PSR_BUILD_METADATA`` For example, assuming a project is currently at version 1.2.3:: $ semantic-release version --minor --print 1.3.0 $ semantic-release version --minor --print --build-metadata "run.12345" 1.3.0+run.12345 .. _cmd-version-option-commit: ``--commit/--no-commit`` ************************ Whether or not to perform a ``git commit`` on modifications to source files made by ``semantic-release`` during this command invocation, and to run ``git tag`` on this new commit with a tag corresponding to the new version. If ``--no-commit`` is supplied, it may disable other options derivatively; please see below. **Default:** ``--commit`` .. seealso:: - :ref:`tag_format ` .. _cmd-version-option-tag: ``--tag/--no-tag`` ************************ Whether or not to perform a ``git tag`` to apply a tag of the corresponding to the new version during this command invocation. This option manages the tag application separate from the commit handled by the ``--commit`` option. If ``--no-tag`` is supplied, it may disable other options derivatively; please see below. **Default:** ``--tag`` .. _cmd-version-option-changelog: ``--changelog/--no-changelog`` ****************************** Whether or not to update the changelog file with changes introduced as part of the new version released. **Default:** ``--changelog`` .. seealso:: - :ref:`config-changelog` - :ref:`changelog-templates` .. _cmd-version-option-push: ``--push/--no-push`` ******************** Whether or not to push new commits and/or tags to the remote repository. **Default:** ``--no-push`` if :ref:`--no-commit ` and :ref:`--no-tag ` is also supplied, otherwise ``push`` is the default. .. _cmd-version-option-vcs-release: ``--vcs-release/--no-vcs-release`` ********************************** Whether or not to create a "release" in the remote VCS service, if supported. Currently releases in GitHub and Gitea remotes are supported. 
If releases aren't supported in a remote VCS, this option will not cause a command failure, but will produce a warning. **Default:** ``--no-vcs-release`` if ``--no-push`` is supplied (including where this is implied by supplying only ``--no-commit``), otherwise ``--vcs-release`` .. _cmd-version-option-skip_build: ``--skip-build`` **************** If passed, skip execution of the :ref:`build_command ` after version stamping and changelog generation. .. _cmd-publish: ``semantic-release publish`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Publish a distribution to a VCS release. Uploads using :ref:`config-publish` .. seealso:: - :ref:`config-publish` - :ref:`config-build_command` .. _cmd-publish-options: Options: -------- .. _cmd-publish-option-tag: ``--tag`` ********* The tag associated with the release to publish to. If not given or set to "latest", then Python Semantic Release will examine the Git tags in your repository to identify the latest version, and attempt to publish to a Release corresponding to this version. **Default:** "latest" .. _cmd-generate-config: ``semantic-release generate-config`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Generate default configuration for semantic-release, to help you get started quickly. You can inspect the defaults, write to a file and then edit according to your needs. For example, to append the default configuration to your pyproject.toml file, you can use the following command:: $ semantic-release generate-config -f toml --pyproject >> pyproject.toml If your project doesn't already leverage TOML files for configuration, it might better suit your project to use JSON instead:: $ semantic-release generate-config -f json If you would like to add JSON configuration to a shared file, e.g. ``package.json``, you can then simply add the output from this command as a **top-level** key to the file. 
**Note:** Because there is no "null" or "nil" concept in TOML (see the relevant `GitHub issue`_), configuration settings which are ``None`` by default are omitted from the default configuration. .. _`GitHub issue`: https://github.com/toml-lang/toml/issues/30 .. seealso:: - :ref:`configuration` .. _cmd-generate-config-options: Options: -------- .. _cmd-generate-config-option-format: ``-f/--format [FORMAT]`` ************************ The format that the default configuration should be generated in. Valid choices are ``toml`` and ``json`` (case-insensitive). **Default:** toml .. _cmd-generate-config-option-pyproject: ``--pyproject`` *************** If used alongside ``--format json``, this option has no effect. When using ``--format=toml``, if specified the configuration will sit under a top-level key of ``tool.semantic_release`` to comply with `PEP 518`_; otherwise, the configuration will sit under a top-level key of ``semantic_release``. .. _PEP 518: https://peps.python.org/pep-0518/#tool-table .. _cmd-changelog: ``semantic-release changelog`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Generate and optionally publish a changelog for your project. The changelog is generated based on a template which can be customized. Python Semantic Release uses Jinja_ as its templating engine; as a result templates need to be written according to the `Template Designer Documentation`_. .. _Jinja: https://jinja.palletsprojects.com/ .. _`Template Designer Documentation`: https://jinja.palletsprojects.com/en/3.1.x/templates/ .. seealso:: - :ref:`config-changelog` - :ref:`config-changelog-environment` - :ref:`changelog-templates` Options: -------- .. _cmd-changelog-option-post-to-release-tag: ``--post-to-release-tag [TAG]`` ******************************* If supplied, attempt to find a release in the remote VCS corresponding to the Git tag ``TAG``, and post the generated changelog to that release. 
If the tag exists but no corresponding release is found in the remote VCS, then Python Semantic Release will attempt to create one. If using this option, the relevant authentication token *must* be supplied via the relevant environment variable. For more information, see :ref:`index-creating-vcs-releases`. python-semantic-release-9.21.0/docs/commit_parsing.rst000066400000000000000000001071311475670435200230600ustar00rootroot00000000000000.. _commit-parsing: Commit Parsing ============== One of the core components of Python Semantic Release (PSR) is the commit parser. The commit parser is responsible for parsing a Project's Git Repository commit history to extract insights about project changes and make decisions based on this insight. The primary decision that PSR makes based on the commit history is whether or not to release a new version of the project, and if so, what version number to release. This decision is made based on the commit message descriptions of the change impact introduced by the commit. The change impact describes the impact to the end consumers of the project. Depending on the type of change, the version number will be incremented according to the `Semantic Versioning`_ specification (semver). It is the commit parser's job to extract the change impact from the commit message to determine the severity of the changes and then subsequently determine the semver level that the version should be bumped to for the next release. The commit parser is also responsible for interpreting other aspects of the commit message which can be used to generate a helpful and detailed changelog. This includes extracting the type of change, the scope of the change, any breaking change descriptions, any linked pull/merge request numbers, and any linked issue numbers. PSR provides several :ref:`built-in commit parsers ` to handle a variety of different commit message styles. 
If the built-in parsers do not meet your needs, you can write your own :ref:`custom parser ` to handle your specific commit message style. .. warning:: PSR's built-in commit parsers are designed to be flexible enough to provide a convenient way to generate the most effective changelogs we can, which means some features are added beyond the scope of the original commit message style guidelines. Other tools may not follow the same conventions as PSR's guideline extensions, so if you plan to use any similar programs in tandem with PSR, you should be aware of the differences in feature support and fall back to the official format guidelines if necessary. .. _Semantic Versioning: https://semver.org/ ---- .. _commit_parser-builtin: Built-in Commit Parsers ----------------------- The following parsers are built in to Python Semantic Release: - :ref:`ConventionalCommitParser ` - :ref:`AngularCommitParser ` *(deprecated in v9.19.0)* - :ref:`EmojiCommitParser ` - :ref:`ScipyCommitParser ` - :ref:`TagCommitParser ` *(deprecated in v9.12.0)* ---- .. _commit_parser-builtin-conventional: Conventional Commits Parser """"""""""""""""""""""""""" *Introduced in v9.19.0* A parser that is designed to parse commits formatted according to the `Conventional Commits Specification`_. The parser is implemented with the following logic in relation to how PSR's core features: - **Version Bump Determination**: This parser extracts the commit type from the subject line of the commit (the first line of a commit message). This type is matched against the configuration mapping to determine the level bump for the specific commit. If the commit type is not found in the configuration mapping, the commit is considered a non-parsable commit and will return it as a ParseError object and ultimately a commit of type ``"unknown"``. The configuration mapping contains lists of commit types that correspond to the level bump for each commit type. 
Some commit types are still valid but do not trigger a level bump, such as ``"chore"`` or ``"docs"``. You can also configure the default level bump :ref:`commit_parser_options.default_level_bump ` if desired. To trigger a major release, the commit message body must contain a paragraph that begins with ``BREAKING CHANGE:``. This will override the level bump determined by the commit type. - **Changelog Generation**: PSR will group commits in the changelog by the commit type used in the commit message. The commit type shorthand is converted to a more human-friendly section heading and then used as the version section title of the changelog and release notes. Under the section title, the parsed commit descriptions are listed out in full. If the commit includes an optional scope, then the scope is prefixed on to the first line of the commit description. If a commit has any breaking change prefixed paragraphs in the commit message body, those paragraphs are separated out into a "Breaking Changes" section in the changelog (Breaking Changes section is available from the default changelog in v9.15.0). Each breaking change paragraph is listed in a bulleted list format across the entire version. A single commit is allowed to have more than one breaking change prefixed paragraph (as opposed to the `Conventional Commits Specification`_). Commits with an optional scope and a breaking change will have the scope prefixed on to the breaking change paragraph. Parsing errors will return a ParseError object and ultimately a commit of type ``"unknown"``. Unknown commits are consolidated into an "Unknown" section in the changelog by the default template. To remove unwanted commits from the changelog that normally are placed in the "unknown" section, consider the use of the configuration option :ref:`changelog.exclude_commit_patterns ` to ignore those commit styles. 
- **Pull/Merge Request Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-linked_merge_request_detection` to identify and extract pull/merge request numbers. The parser will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parser will return an empty string. - **Linked Issue Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-issue_number_detection` to identify and extract issue numbers. The parser will return a tuple of issue numbers as strings if any are found in the commit message. If no issue numbers are found, the parser will return an empty tuple. - **Squash Commit Evaluation**: This parser implements PSR's :ref:`commit_parser-builtin-squash_commit_evaluation` to identify and extract each commit message as a separate commit message within a single squashed commit. You can toggle this feature on/off via the :ref:`config-commit_parser_options` setting. - **Release Notice Footer Detection**: This parser implements PSR's :ref:`commit_parser-builtin-release_notice_footer_detection`, which is a custom extension to traditional `Conventional Commits Specification`_ to use the ``NOTICE`` keyword as a git footer to document additional release information that is not considered a breaking change. **Limitations**: - Commits with the ``revert`` type are not currently supported. Track the implementation of this feature in the issue `#402`_. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _#402: https://github.com/python-semantic-release/python-semantic-release/issues/402 .. _Conventional Commits Specification: https://www.conventionalcommits.org/en/v1.0.0 ---- .. _commit_parser-builtin-angular: Angular Commit Parser """"""""""""""""""""" .. warning:: This parser was deprecated in ``v9.19.0``. It will be removed in a future release. 
This parser is being replaced by the :ref:`commit_parser-builtin-conventional`. A parser that is designed to parse commits formatted according to the `Angular Commit Style Guidelines`_. The parser is implemented with the following logic in relation to how PSR's core features: - **Version Bump Determination**: This parser extracts the commit type from the subject line of the commit (the first line of a commit message). This type is matched against the configuration mapping to determine the level bump for the specific commit. If the commit type is not found in the configuration mapping, the commit is considered a non-parsable commit and will return it as a ParseError object and ultimately a commit of type ``"unknown"``. The configuration mapping contains lists of commit types that correspond to the level bump for each commit type. Some commit types are still valid but do not trigger a level bump, such as ``"chore"`` or ``"docs"``. You can also configure the default level bump :ref:`commit_parser_options.default_level_bump ` if desired. To trigger a major release, the commit message body must contain a paragraph that begins with ``BREAKING CHANGE:``. This will override the level bump determined by the commit type. - **Changelog Generation**: PSR will group commits in the changelog by the commit type used in the commit message. The commit type shorthand is converted to a more human-friendly section heading and then used as the version section title of the changelog and release notes. Under the section title, the parsed commit descriptions are listed out in full. If the commit includes an optional scope, then the scope is prefixed on to the first line of the commit description. If a commit has any breaking change prefixed paragraphs in the commit message body, those paragraphs are separated out into a "Breaking Changes" section in the changelog (Breaking Changes section is available from the default changelog in v9.15.0). 
Each breaking change paragraph is listed in a bulleted list format across the entire version. A single commit is allowed to have more than one breaking change prefixed paragraph (as opposed to the `Angular Commit Style Guidelines`_). Commits with an optional scope and a breaking change will have the scope prefixed on to the breaking change paragraph. Parsing errors will return a ParseError object and ultimately a commit of type ``"unknown"``. Unknown commits are consolidated into an "Unknown" section in the changelog by the default template. To remove unwanted commits from the changelog that normally are placed in the "unknown" section, consider the use of the configuration option :ref:`changelog.exclude_commit_patterns ` to ignore those commit styles. - **Pull/Merge Request Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-linked_merge_request_detection` to identify and extract pull/merge request numbers. The parser will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parser will return an empty string. *Feature available in v9.13.0+.* - **Linked Issue Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-issue_number_detection` to identify and extract issue numbers. The parser will return a tuple of issue numbers as strings if any are found in the commit message. If no issue numbers are found, the parser will return an empty tuple. *Feature available in v9.15.0+.* - **Squash Commit Evaluation**: This parser implements PSR's :ref:`commit_parser-builtin-squash_commit_evaluation` to identify and extract each commit message as a separate commit message within a single squashed commit. You can toggle this feature on/off via the :ref:`config-commit_parser_options` setting. 
*Feature available in v9.17.0+.* - **Release Notice Footer Detection**: This parser implements PSR's :ref:`commit_parser-builtin-release_notice_footer_detection`, which is a custom extension to traditional `Angular Commit Style Guidelines`_ to use the ``NOTICE`` keyword as a git footer to document additional release information that is not considered a breaking change. *Feature available in v9.18.0+.* **Limitations**: - Commits with the ``revert`` type are not currently supported. Track the implementation of this feature in the issue `#402`_. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _#402: https://github.com/python-semantic-release/python-semantic-release/issues/402 .. _Angular Commit Style Guidelines: https://github.com/angular/angular.js/blob/master/DEVELOPERS.md#commits ---- .. _commit_parser-builtin-emoji: Emoji Commit Parser """"""""""""""""""" A parser that is designed to parse commits formatted to the `Gitmoji Specification`_ with a few additional features that the specification does not cover but provide similar functionality expected from a Semantic Release tool. As the `Gitmoji Specification`_ describes, the emojis can be specified in either the unicode format or the shortcode text format. See the `Gitmoji Specification`_ for the pros and cons for which format to use, but regardless, the configuration options must match the format used in the commit messages. The parser is implemented with the following logic in relation to how PSR's core features: - **Version Bump Determination**: This parser only looks for emojis in the subject line of the commit (the first line of a commit message). If more than one emoji is found, the emoji configured with the highest priority is selected for the change impact for the specific commit. 
The emoji with the highest priority is the one configured in the ``major`` configuration option, followed by the ``minor``, and ``patch`` in descending priority order. If no emoji is found in the subject line, the commit is classified as other and will default to the level bump defined by the configuration option :ref:`commit_parser_options.default_level_bump `. - **Changelog Generation**: PSR will group commits in the changelog by the emoji type used in the commit message. The emoji is used as the version section title and the commit descriptions are listed under that section. No emojis are removed from the commit message so each will be listed in the changelog and release notes. When more than one emoji is found in the subject line of a commit, the emoji with the highest priority is the one that will influence the grouping of the commit in the changelog. Commits containing no emojis or non-configured emojis are consolidated into an "Other" section. To remove unwanted commits from the changelog that would normally be added into the "other" section, consider the use of the configuration option :ref:`changelog.exclude_commit_patterns ` to ignore those commit styles. - **Pull/Merge Request Identifier Detection**: This parser implements PSR's :ref:`commit_parser-builtin-linked_merge_request_detection` to identify and extract pull/merge request numbers. The parser will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parser will return an empty string. *Feature available in v9.13.0+.* - **Linked Issue Identifier Detection**: [Disabled by default] This parser implements PSR's :ref:`commit_parser-builtin-issue_number_detection` to identify and extract issue numbers. The parser will return a tuple of issue numbers as strings if any are found in the commit message. If no issue numbers are found, the parser will return an empty tuple. 
This feature is disabled by default since it is not a part of the `Gitmoji Specification`_ but can be enabled by setting the configuration option ``commit_parser_options.parse_linked_issues`` to ``true``. *Feature available in v9.15.0+.* - **Squash Commit Evaluation**: This parser implements PSR's :ref:`commit_parser-builtin-squash_commit_evaluation` to identify and extract each commit message as a separate commit message within a single squashed commit. You can toggle this feature on/off via the :ref:`config-commit_parser_options` setting. *Feature available in v9.17.0+.* - **Release Notice Footer Detection**: This parser implements PSR's :ref:`commit_parser-builtin-release_notice_footer_detection`, which is a custom extension that uses the ``NOTICE`` keyword as a git footer to document additional release information that is not considered a breaking change. *Feature available in v9.18.0+.* If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _Gitmoji Specification: https://gitmoji.dev/specification ---- .. _commit_parser-builtin-scipy: Scipy Commit Parser """"""""""""""""""" A parser that is designed to parse commits formatted according to the `Scipy Commit Style Guidelines`_. This is essentially a variation of the `Angular Commit Style Guidelines`_ with all different commit types. Because of this small variance, this parser only extends our :ref:`commit_parser-builtin-angular` parser with pre-defined scipy commit types in the default Scipy Parser Options and all other features are inherited. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. .. _Scipy Commit Style Guidelines: https://scipy.github.io/devdocs/dev/contributor/development_workflow.html#writing-the-commit-message ---- .. _commit_parser-builtin-tag: Tag Commit Parser """"""""""""""""" .. warning:: This parser was deprecated in ``v9.12.0``. 
It will be removed in a future release. The original parser from v1.0.0 of Python Semantic Release. Similar to the emoji parser above, but with fewer features. If no commit parser options are provided via the configuration, the parser will use PSR's built-in :py:class:`defaults `. ---- .. _commit_parser-builtin-linked_merge_request_detection: Common Linked Merge Request Detection """"""""""""""""""""""""""""""""""""" *Introduced in v9.13.0* All of the PSR built-in parsers implement common pull/merge request identifier detection logic to extract pull/merge request numbers from the commit message regardless of the VCS platform. The parsers evaluate the subject line for a parenthesis-enclosed number at the end of the line. PSR's parsers will return a string value if a pull/merge request number is found in the commit message. If no pull/merge request number is found, the parsers will return an empty string. **Examples**: *All of the following will extract a MR number of "x123", where 'x' is the character prefix* 1. BitBucket: ``Merged in feat/my-awesome-feature (pull request #123)`` 2. GitHub: ``feat: add new feature (#123)`` 3. GitLab: ``fix: resolve an issue (!123)`` ---- .. _commit_parser-builtin-issue_number_detection: Common Issue Identifier Detection """"""""""""""""""""""""""""""""" *Introduced in v9.15.0* All of the PSR built-in parsers implement common issue identifier detection logic, which is similar to many VCS platforms such as GitHub, GitLab, and BitBucket. The parsers will look for common issue closure text prefixes in the `Git Trailer format`_ in the commit message to identify and extract issue numbers. The detection logic is not strict to any specific issue tracker as we try to provide a flexible approach to identifying issue numbers but in order to be flexible, it is **required** to use the `Git Trailer format`_ with a colon (``:``) as the token separator. 
PSR attempts to support all variants of issue closure text prefixes, but not all will work for your VCS. PSR supports the following case-insensitive prefixes and their conjugations (plural, present, & past tense): - close (closes, closing, closed) - fix (fixes, fixing, fixed) - resolve (resolves, resolving, resolved) - implement (implements, implementing, implemented) PSR also allows for a more flexible approach to identifying more than one issue number without the need of extra git trailers (although PSR does support multiple git trailers). PSR supports various list formats which can be used to identify more than one issue in a list. This format will not necessarily work on your VCS. PSR currently supports the following list formats: - comma-separated (ex. ``Closes: #123, #456, #789``) - space-separated (ex. ``resolve: #123 #456 #789``) - semicolon-separated (ex. ``Fixes: #123; #456; #789``) - slash-separated (ex. ``close: #123/#456/#789``) - ampersand-separated (ex. ``Implement: #123 & #789``) - and-separated (ex. ``Resolve: #123 and #456 and #789``) - mixed (ex. ``Closed: #123, #456, and #789`` or ``Fixes: #123, #456 & #789``) All the examples above use the most common issue number prefix (``#``) but PSR is flexible to support other prefixes used by VCS platforms or issue trackers such as JIRA (ex. ``ABC-###``). The parsers will return a tuple of issue numbers as strings if any are found in the commit message. Strings are returned to ensure that any issue number prefix characters are preserved (ex. ``#123`` or ``ABC-123``). If no issue numbers are found, the parsers will return an empty tuple. **References**: - `BitBucket: Resolving Issues Automatically `_ - `GitHub: Linking Issue to PR `_ - `GitLab: Default Closing Patterns `_ .. _Git Trailer format: https://git-scm.com/docs/git-interpret-trailers ---- .. 
_commit_parser-builtin-release_notice_footer_detection: Common Release Notice Footer Detection """""""""""""""""""""""""""""""""""""" *Introduced in v9.18.0* All of the PSR built-in parsers implement common release notice footer detection logic to identify and extract a ``NOTICE`` git trailer that documents any additional release information the developer wants to provide to the software consumer. The idea extends from the concept of the ``BREAKING CHANGE:`` git trailer to document any breaking change descriptions but the ``NOTICE`` trailer is intended to document any information that is below the threshold of a breaking change while still important for the software consumer to be aware of. Common uses would be to provide deprecation warnings or more detailed change usage information for that release. Parsers will collapse single newlines after the ``NOTICE`` trailer into a single line paragraph. Commits may have more than one ``NOTICE`` trailer in a single commit message. Each :py:class:`ParsedCommit ` will have a ``release_notices`` attribute that is a tuple of string paragraphs to identify each release notice. In the default changelog and release notes template, these release notices will be formatted into their own section called **Additional Release Information**. Each will include any commit scope defined and each release notice in alphabetical order. ---- .. _commit_parser-builtin-squash_commit_evaluation: Common Squash Commit Evaluation """"""""""""""""""""""""""""""" *Introduced in v9.17.0* All of the PSR built-in parsers implement common squash commit evaluation logic to identify and extract individual commit messages from a single squashed commit. The parsers will look for common squash commit delimiters and multiple matches of the commit message format to identify each individual commit message that was squashed. The parsers will return a list containing each commit message as a separate commit object. 
Squashed commits will be evaluated individually for both the level bump and changelog generation. If no squash commits are found, a list with the single commit object will be returned. Currently, PSR has been tested against GitHub, BitBucket, and official ``git`` squash merge commit messages. GitLab does not have a default template for squash commit messages but can be customized per project or server. If you are using GitLab, you will need to ensure that the squash commit message format is similar to the example below. **Example**: *The following example will extract three separate commit messages from a single GitHub formatted squash commit message of conventional commit style:* .. code-block:: text feat(config): add new config option (#123) * refactor(config): change the implementation of config loading * docs(configuration): defined new config option for the project When parsed with the default conventional-commit parser with squash commits toggled on, the version bump will be determined by the highest level bump of the three commits (in this case, a minor bump because of the feature commit) and the release notes would look similar to the following: .. code-block:: markdown ## Features - **config**: add new config option (#123) ## Documentation - **configuration**: defined new config option for the project (#123) ## Refactoring - **config**: change the implementation of config loading (#123) Merge request numbers and commit hash values will be the same across all extracted commits. Additionally, any :ref:`config-changelog-exclude_commit_patterns` will be applied individually to each extracted commit so if you have an exclusion match for ignoring ``refactor`` commits, the second commit in the example above would be excluded from the changelog. .. 
important:: When squash commit evaluation is enabled, if you squashed a higher level bump commit into the body of a lower level bump commit, the higher level bump commit will be evaluated as the level bump for the entire squashed commit. This includes breaking change descriptions. ---- .. _commit_parser-builtin-customization: Customization """"""""""""" Each of the built-in parsers can be customized by providing overrides in the :ref:`config-commit_parser_options` setting of the configuration file. This can be used to toggle parsing features on and off or to add, modify, or remove the commit types that are used to determine the level bump for a commit. Review the API documentation for the specific parser's options class to see what changes to the default behavior can be made. ---- .. _commit_parser-custom_parser: Custom Parsers -------------- Custom parsers can be written to handle commit message styles that are not covered by the built-in parsers or by option customization of the built-in parsers. Python Semantic Release provides several building blocks to help you write your parser. To maintain compatibility with how Python Semantic Release will invoke your parser, you should use the appropriate object as described below, or create your own object as a subclass of the original which maintains the same interface. Type parameters are defined where appropriate to assist with static type-checking. The :ref:`commit_parser ` option, if set to a string which does not match one of Python Semantic Release's built-in commit parsers, will be used to attempt to dynamically import a custom commit parser class. In order to use your custom parser, you must provide how to import the module and class via the configuration option. There are two ways to provide the import string: 1. **File Path & Class**: The format is ``"path/to/module_file.py:ClassName"``. This is the easiest way to provide a custom parser. 
This method allows you to store your custom parser directly in the repository with no additional installation steps. PSR will locate the file, load the module, and instantiate the class. Relative paths are recommended and it should be provided relative to the current working directory. This import variant is available in v9.16.0 and later. 2. **Module Path & Class**: The format is ``"package.module_name:ClassName"``. This method allows you to store your custom parser in a package that is installed in the same environment as PSR. This method is useful if you want to share your custom parser across multiple repositories. To share it across multiple repositories generally you will need to publish the parser as its own separate package and then ``pip install`` it into the current virtual environment. You can also keep it in the same repository as your project as long as it is in the current directory of the virtual environment and is locatable by the Python import system. You may need to set the ``PYTHONPATH`` environment variable if you have a more complex directory structure. This import variant is available in v8.0.0 and later. To test that your custom parser is importable, you can run the following command in the directory where PSR will be executed: .. code-block:: bash python -c "from package.module_name import ClassName" .. note:: Remember this is basic python import rules so the package name is optional and generally packages are defined by a directory with ``__init__.py`` files. .. _commit_parser-tokens: Tokens """""" The tokens built into Python Semantic Release's commit parsing mechanism are inspired by both the error-handling mechanism in `Rust's error handling`_ and its implementation in `black`_. 
It is documented that `catching exceptions in Python is slower`_ than the equivalent guard implemented using ``if/else`` checking when exceptions are actually caught, so although ``try/except`` blocks are cheap if no exception is raised, commit parsers should always return an object such as :py:class:`ParseError ` instead of raising an error immediately. This is to avoid catching a potentially large number of parsing errors being caught as the commit history of a repository is being parsed. Python Semantic Release does not raise an exception if a commit cannot be parsed. Python Semantic Release uses :py:class:`ParsedCommit ` as the return type of a successful parse operation, and :py:class:`ParseError ` as the return type from an unsuccessful parse of a commit. You should review the API documentation linked to understand the fields available on each of these objects. It is important to note, the :py:class:`ParseError ` implements an additional method, ``raise_error``. This method raises a :py:class:`CommitParseError ` with the message contained in the ``error`` field, as a convenience. In Python Semantic Release, the type ``semantic_release.commit_parser.token.ParseResult`` is defined as ``ParseResultType[ParsedCommit, ParseError]``, as a convenient shorthand. :py:class:`ParseResultType ` is a generic type, which is the ``Union`` of its two type parameters. One of the types in this union should be the type returned on a successful parse of the ``commit``, while the other should be the type returned on an unsuccessful parse of the ``commit``. 
A custom parser result type, therefore, could be implemented as follows: * ``MyParsedCommit`` subclasses :py:class:`ParsedCommit ` * ``MyParseError`` subclasses :py:class:`ParseError ` * ``MyParseResult = ParseResultType[MyParsedCommit, MyParseError]`` Internally, Python Semantic Release uses ``isinstance()`` to determine if the result of parsing a commit was a success or not, so you should check that your custom result and error types return ``True`` from ``isinstance(, ParsedCommit)`` and ``isinstance(, ParseError)`` respectively. While it's not advisable to remove any of the fields that are available in the built-in token types, currently only the ``bump`` field of the successful result type is used to determine how the version should be incremented as part of this release. However, it's perfectly possible to add additional fields to your tokens which can be populated by your parser; these fields will then be available on each commit in your :ref:`changelog template `, so you can make additional information available. .. _Rust's error handling: https://doc.rust-lang.org/book/ch09-02-recoverable-errors-with-result.html .. _black: https://github.com/psf/black/blob/main/src/black/rusty.py .. _catching exceptions in Python is slower: https://docs.python.org/3/faq/design.html#how-fast-are-exceptions .. _namedtuple: https://docs.python.org/3/library/typing.html#typing.NamedTuple .. _commit_parser-parser-options: Parser Options """""""""""""" When writing your own parser, you should accompany the parser with an "options" class which accepts the appropriate keyword arguments. This class' ``__init__`` method should store the values that are needed for parsing appropriately. Python Semantic Release will pass any configuration options from the configuration file's :ref:`commit_parser_options `, into your custom parser options class. To ensure that the configuration options are passed correctly, the options class should inherit from the :py:class:`ParserOptions ` class. 
The "options" class is used to validate the options which are configured in the repository, and to provide default values for these options where appropriate. .. _commit-parsing-commit-parsers: Commit Parsers """""""""""""" The commit parsers that are built into Python Semantic Release implement an instance method called ``parse``, which takes a single parameter ``commit`` of type `git.objects.commit.Commit `_, and returns the type ``ParseResultType``. To be compatible with Python Semantic Release, a commit parser must subclass :py:class:`CommitParser `. A subclass must implement the following: * A class-level attribute ``parser_options``, which must be set to :py:class:`ParserOptions ` or a subclass of this. * An ``__init__`` method which takes a single parameter, ``options``, that should be of the same type as the class' ``parser_options`` attribute. * A method, ``parse``, which takes a single parameter ``commit`` that is of type `git.objects.commit.Commit `_, and returns :py:class:`ParseResult `, or a subclass of this. By default, the constructor for :py:class:`CommitParser ` will set the ``options`` parameter on the ``options`` attribute of the parser, so there is no need to override this in order to access ``self.options`` during the ``parse`` method. However, if you have any parsing logic that needs to be done only once, it may be a good idea to perform this logic during parser instantiation rather than inside the ``parse`` method. The parse method will be called once per commit in the repository's history during parsing, so the effect of slow parsing logic within the ``parse`` method will be magnified significantly for projects with sizeable Git histories. Commit Parsers have two type parameters, "TokenType" and "OptionsType". The first is the type which is returned by the ``parse`` method, and the second is the type of the "options" class for this parser. Therefore, a custom commit parser could be implemented via: .. 
code-block:: python class MyParserOptions(semantic_release.ParserOptions): def __init__(self, message_prefix: str) -> None: self.prefix = message_prefix * 2 class MyCommitParser( semantic_release.CommitParser[semantic_release.ParseResult, MyParserOptions] ): def parse(self, commit: git.objects.commit.Commit) -> semantic_release.ParseResult: ... .. _gitpython-commit-object: https://gitpython.readthedocs.io/en/stable/reference.html#module-git.objects.commit python-semantic-release-9.21.0/docs/conf.py000066400000000000000000000044641475670435200206170ustar00rootroot00000000000000import os import sys sys.path.insert(0, os.path.abspath(".")) sys.path.insert(0, os.path.abspath("..")) import semantic_release # noqa: E402 author_name = "Python Semantic Release Team" # -- General configuration ------------------------------------------------ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.coverage", "sphinx.ext.viewcode", "sphinxcontrib.apidoc", ] autodoc_default_options = {"ignore-module-all": True} templates_path = ["_templates"] source_suffix = ".rst" master_doc = "index" project = "python-semantic-release" copyright = f"2024, {author_name}" # noqa: A001 version = semantic_release.__version__ release = semantic_release.__version__ exclude_patterns = ["_build"] pygments_style = "sphinx" html_theme = "furo" htmlhelp_basename = "python-semantic-releasedoc" # -- Automatically run sphinx-apidoc -------------------------------------- docs_path = os.path.dirname(__file__) apidoc_output_dir = os.path.join(docs_path, "api") apidoc_module_dir = os.path.join(docs_path, "..", "src") apidoc_separate_modules = True apidoc_module_first = True apidoc_extra_args = ["-d", "3"] def setup(app): # type: ignore[no-untyped-def] # noqa: ARG001,ANN001,ANN201 pass # -- Options for LaTeX output --------------------------------------------- latex_documents = [ ( "index", "python-semantic-release.tex", "python-semantic-release Documentation", author_name, "manual", ), ] # -- 
Options for manual page output --------------------------------------- man_pages = [ ( "index", "python-semantic-release", "python-semantic-release Documentation", [author_name], 1, ) ] # -- Options for Texinfo output ------------------------------------------- texinfo_documents = [ ( "index", "python-semantic-release", "python-semantic-release Documentation", author_name, "python-semantic-release", "One line description of project.", "Miscellaneous", ), ] # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = "python-semantic-release" epub_author = author_name epub_publisher = author_name epub_copyright = copyright epub_exclude_files = ["search.html"] python-semantic-release-9.21.0/docs/configuration.rst000066400000000000000000001342221475670435200227150ustar00rootroot00000000000000.. _configuration: Configuration ============= Configuration is read from a file which can be specified using the :ref:`\\\\-\\\\-config ` option to :ref:`cmd-main`. Python Semantic Release currently supports a configuration in either TOML or JSON format, and will attempt to auto-detect and parse either format. When using a JSON-format configuration file, Python Semantic Release looks for its settings beneath a top-level ``semantic_release`` key; when using a TOML-format configuration file, Python Semantic Release first checks for its configuration under the table ``[tool.semantic_release]`` (in line with the convention for Python tools to require their configuration under the top-level ``tool`` table in their ``pyproject.toml`` file), followed by ``[semantic_release]``, which may be more desirable if using a file other than the default ``pyproject.toml`` for configuration. The examples on this page are given in TOML format, however there is no limitation on using JSON instead. In fact, if you would like to convert any example below to its JSON equivalent, the following commands will do this for you (in Bash): .. 
code-block:: bash export TEXT="" cat <`, such as ``GH_TOKEN``, in your configuration file, and Python Semantic Release will do the rest, as seen below. .. code-block:: toml [semantic_release.remote.token] env = "GH_TOKEN" Given basic TOML syntax compatibility, this is equivalent to: .. code-block:: toml [semantic_release.remote] token = { env = "GH_TOKEN" } The general format for specifying that some configuration should be sourced from an environment variable is: .. code-block:: toml [semantic_release.variable] env = "ENV_VAR" default_env = "FALLBACK_ENV_VAR" default = "default value" In this structure: * ``env`` represents the environment variable that Python Semantic Release will search for * ``default_env`` is a fallback environment variable to read in case the variable specified by ``env`` is not set. This is optional - if not specified then no fallback will be used. * ``default`` is a default value to use in case the environment variable specified by ``env`` is not set. This is optional - if ``default`` is not specified then the environment variable specified by ``env`` is considered required. .. _config-root: ``semantic_release`` settings ----------------------------- The following sections outline all the definitions and descriptions of each supported configuration setting. If there are type mis-matches, PSR will throw validation errors upon load. If a setting is not provided, than PSR will fill in the value with the default value. Python Semantic Release expects a root level key to start the configuration definition. Make sure to use the correct root key depending on the configuration format you are using. .. note:: If you are using ``pyproject.toml``, this heading should include the ``tool`` prefix as specified within PEP 517, resulting in ``[tool.semantic_release]``. .. note:: If you are using a ``releaserc.toml``, use ``[semantic_release]`` as the root key .. 
note:: If you are using a ``releaserc.json``, ``semantic_release`` must be the root key in the top level dictionary. ---- .. _config-allow_zero_version: ``allow_zero_version`` """""""""""""""""""""" *Introduced in v9.2.0* **Type:** ``bool`` This flag controls whether or not Python Semantic Release will use version numbers aligning with the ``0.x.x`` pattern. If set to ``true`` and starting at ``0.0.0``, a minor bump would set the next version as ``0.1.0`` whereas a patch bump would set the next version as ``0.0.1``. A breaking change (ie. major bump) would set the next version as ``1.0.0`` unless the :ref:`config-major_on_zero` is set to ``false``. If set to ``false``, Python Semantic Release will consider the first possible version to be ``1.0.0``, regardless of patch, minor, or major change level. Additionally, when ``allow_zero_version`` is set to ``false``, the :ref:`config-major_on_zero` setting is ignored. **Default:** ``true`` ---- .. _config-assets: ``assets`` """""""""" **Type:** ``list[str]`` One or more paths to additional assets that should committed to the remote repository in addition to any files modified by writing the new version. **Default:** ``[]`` ---- .. _config-branches: ``branches`` """""""""""" This setting is discussed in more detail at :ref:`multibranch-releases` **Default:** .. code-block:: toml [semantic_release.branches.main] match = "(main|master)" prerelease_token = "rc" prerelease = false ---- .. _config-build_command: ``build_command`` """"""""""""""""" **Type:** ``Optional[str]`` Command to use to build the current project during :ref:`cmd-version`. Python Semantic Release will execute the build command in the OS default shell with a subset of environment variables. PSR provides the variable ``NEW_VERSION`` in the environment with the value of the next determined version. The following table summarizes all the environment variables that are passed on to the ``build_command`` runtime if they exist in the parent process. 
If you would like to pass additional environment variables to your build command, see :ref:`config-build_command_env`. ======================== ====================================================================== Variable Name Description ======================== ====================================================================== CI Pass-through ``true`` if exists in process env, unset otherwise BITBUCKET_CI ``true`` if Bitbucket CI variables exist in env, unset otherwise GITHUB_ACTIONS Pass-through ``true`` if exists in process env, unset otherwise GITEA_ACTIONS Pass-through ``true`` if exists in process env, unset otherwise GITLAB_CI Pass-through ``true`` if exists in process env, unset otherwise HOME Pass-through ``HOME`` of parent process NEW_VERSION Semantically determined next version (ex. ``1.2.3``) PATH Pass-through ``PATH`` of parent process PSR_DOCKER_GITHUB_ACTION Pass-through ``true`` if exists in process env, unset otherwise VIRTUAL_ENV Pass-through ``VIRTUAL_ENV`` if exists in process env, unset otherwise ======================== ====================================================================== In addition, on windows systems these environment variables are passed: ======================== ====================================================================== Variable Name Description ======================== ====================================================================== ALLUSERSAPPDATA Pass-through ``ALLUSERAPPDATA`` if exists in process env, unset otherwise ALLUSERSPROFILE Pass-through ``ALLUSERSPPPROFILE`` if exists in process env, unset otherwise APPDATA Pass-through ``APPDATA`` if exists in process env, unset otherwise COMMONPROGRAMFILES Pass-through ``COMMONPROGRAMFILES`` if exists in process env, unset otherwise COMMONPROGRAMFILES(X86) Pass-through ``COMMONPROGRAMFILES(X86)`` if exists in process env, unset otherwise DEFAULTUSERPROFILE Pass-through ``DEFAULTUSERPROFILE`` if exists in process env, unset otherwise HOMEPATH 
Pass-through ``HOMEPATH`` if exists in process env, unset otherwise PATHEXT Pass-through ``PATHEXT`` if exists in process env, unset otherwise PROFILESFOLDER Pass-through ``PROFILESFOLDER`` if exists in process env, unset otherwise PROGRAMFILES Pass-through ``PROGRAMFILES`` if exists in process env, unset otherwise PROGRAMFILES(X86) Pass-through ``PROGRAMFILES(X86)`` if exists in process env, unset otherwise SYSTEM Pass-through ``SYSTEM`` if exists in process env, unset otherwise SYSTEM16 Pass-through ``SYSTEM16`` if exists in process env, unset otherwise SYSTEM32 Pass-through ``SYSTEM32`` if exists in process env, unset otherwise SYSTEMDRIVE Pass-through ``SYSTEMDRIVE`` if exists in process env, unset otherwise SYSTEMROOT Pass-through ``SYSTEMROOT`` if exists in process env, unset otherwise TEMP Pass-through ``TEMP`` if exists in process env, unset otherwise TMP Pass-through ``TMP`` if exists in process env, unset otherwise USERPROFILE Pass-through ``USERPROFILE`` if exists in process env, unset otherwise USERSID Pass-through ``USERSID`` if exists in process env, unset otherwise WINDIR Pass-through ``WINDIR`` if exists in process env, unset otherwise ======================== ====================================================================== **Default:** ``None`` (not specified) ---- .. _config-build_command_env: ``build_command_env`` """"""""""""""""""""" *Introduced in v9.7.2* **Type:** ``Optional[list[str]]`` List of environment variables to include or pass-through on to the build command that executes during :ref:`cmd-version`. This configuration option allows the user to extend the list of environment variables from the table above in :ref:`config-build_command`. The input is a list of strings where each individual string handles a single variable definition. 
There are two formats accepted and are detailed in the following table: ================== =================================================================== FORMAT Description ================== =================================================================== ``VAR_NAME`` Detects value from the PSR process environment, and passes value to ``build_command`` process ``VAR_NAME=value`` Sets variable name to value inside of ``build_command`` process ================== =================================================================== .. note:: Although variable name capitalization is not required, it is recommended as to be in-line with the POSIX-compliant recommendation for shell variable names. **Default:** ``None`` (not specified) ---- .. _config-changelog: ``changelog`` """"""""""""" This section outlines the configuration options available that modify changelog generation. .. note:: **pyproject.toml:** ``[tool.semantic_release.changelog]`` **releaserc.toml:** ``[semantic_release.changelog]`` **releaserc.json:** ``{ "semantic_release": { "changelog": {} } }`` ---- .. _config-changelog-changelog_file: ``changelog_file`` ****************** .. warning:: *Deprecated in v9.11.0.* This setting has been moved to :ref:`changelog.default_templates.changelog_file ` for a more logical grouping. This setting will be removed in a future major release. **Type:** ``str`` Specify the name of the changelog file that will be created. This file will be created or overwritten (if it previously exists) with the rendered default template included with Python Semantic Release. Depending on the file extension of this setting, the changelog will be rendered in the format designated by the extension. PSR, as of v9.11.0, provides a default changelog template in both Markdown (``.md``) and reStructuredText (``.rst``) formats. 
If the file extension is not recognized, the changelog will be rendered in Markdown format, unless the :ref:`config-changelog-default_templates-output_format` setting is set. If you are using the ``template_dir`` setting for providing customized templates, this setting is not used. See :ref:`config-changelog-template_dir` for more information. **Default:** ``"CHANGELOG.md"`` ---- .. _config-changelog-default_templates: ``default_templates`` ********************* .. note:: This section of the configuration contains options which customize or modify the default changelog templates included with PSR. **pyproject.toml:** ``[tool.semantic_release.changelog.default_templates]`` **releaserc.toml:** ``[semantic_release.changelog.default_templates]`` **releaserc.json:** ``{ "semantic_release": { "changelog": { "default_templates": {} } } }`` ---- .. _config-changelog-default_templates-changelog_file: ``changelog_file`` '''''''''''''''''' *Introduced in v9.11.0.* **Type:** ``str`` Specify the name of the changelog file that will be created. This file will be created or overwritten (if it previously exists) with the rendered default template included with Python Semantic Release. Depending on the file extension of this setting, the changelog will be rendered in the format designated by the extension. PSR, as of v9.11.0, provides a default changelog template in both Markdown (``.md``) and reStructuredText (``.rst``) formats. If the file extension is not recognized, the changelog will be rendered in Markdown format, unless the :ref:`config-changelog-default_templates-output_format` setting is set. If you are using the ``template_dir`` setting for providing customized templates, this setting is not used. See :ref:`config-changelog-template_dir` for more information. **Default:** ``"CHANGELOG.md"`` ---- .. 
_config-changelog-default_templates-mask_initial_release: ``mask_initial_release`` '''''''''''''''''''''''' *Introduced in v9.14.0* **Type:** ``bool`` This option toggles the behavior of the changelog and release note templates to mask the release details specifically for the first release. When set to ``true``, the first version release notes will be masked with a generic message as opposed to the usual commit details. When set to ``false``, the release notes will be generated as normal. The reason for this setting is to improve clarity to your audience. It conceptually does **NOT** make sense to have a list of changes (i.e. a Changelog) for the first release since nothing has been published yet, therefore in the eyes of your consumers what change is there to document? The message details can be found in the ``first_release.md.j2`` and ``first_release.rst.j2`` templates of the default changelog template directory. **Default:** ``false`` .. seealso:: - :ref:`changelog-templates-default_changelog` ---- .. _config-changelog-default_templates-output_format: ``output_format`` ''''''''''''''''' *Introduced in v9.10.0* **Type:** ``Literal["md", "rst"]`` This setting is used to specify the output format the default changelog template will use when rendering the changelog. PSR supports both Markdown (``md``) and reStructuredText (``rst``) formats. This setting will take precedence over the file extension of the :ref:`config-changelog-default_templates-changelog_file` setting. If this setting is omitted, the file extension of the :ref:`config-changelog-default_templates-changelog_file` setting will be used to determine the output format. If the file extension is not recognized, the output format will default to Markdown. **Default:** ``"md"`` .. seealso:: - :ref:`config-changelog-default_templates-changelog_file` ---- .. _config-changelog-environment: ``environment`` *************** .. 
note:: This section of the configuration contains options which customize the template environment used to render templates such as the changelog. Most options are passed directly to the `jinja2.Environment`_ constructor, and further documentation one these parameters can be found there. **pyproject.toml:** ``[tool.semantic_release.changelog.environment]`` **releaserc.toml:** ``[semantic_release.changelog.environment]`` **releaserc.json:** ``{ "semantic_release": { "changelog": { "environment": {} } } }`` .. _`jinja2.Environment`: https://jinja.palletsprojects.com/en/3.1.x/api/#jinja2.Environment ---- .. _config-changelog-environment-autoescape: ``autoescape`` '''''''''''''' **Type:** ``Union[str, bool]`` If this setting is a string, it should be given in ``module:attr`` form; Python Semantic Release will attempt to dynamically import this string, which should represent a path to a suitable callable that satisfies the following: As of Jinja 2.4 this can also be a callable that is passed the template name and has to return ``true`` or ``false`` depending on autoescape should be enabled by default. The result of this dynamic import is passed directly to the `jinja2.Environment`_ constructor. If this setting is a boolean, it is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``false`` ---- .. _config-changelog-environment-block_start_string: ``block_start_string`` '''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"{%"`` ---- .. _config-changelog-environment-block_end_string: ``block_end_string`` '''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"%}"`` ---- .. _config-changelog-environment-comment_start_string: ``comment_start_string`` '''''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``{#`` ---- .. 
_config-changelog-environment-comment_end_string: ``comment_end_string`` '''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"#}"`` ---- .. _config-changelog-environment-extensions: ``extensions`` '''''''''''''' **Type:** ``list[str]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``[]`` ---- .. _config-changelog-environment-keep_trailing_newline: ``keep_trailing_newline`` ''''''''''''''''''''''''' **Type:** ``bool`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``false`` ---- .. _config-changelog-environment-line_comment_prefix: ``line_comment_prefix`` ''''''''''''''''''''''' **Type:** ``Optional[str]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``None`` (not specified) ---- .. _config-changelog-environment-line_statement_prefix: ``line_statement_prefix`` ''''''''''''''''''''''''' **Type:** ``Optional[str]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``None`` (not specified) ---- .. _config-changelog-environment-lstrip_blocks: ``lstrip_blocks`` ''''''''''''''''' **Type:** ``bool`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``false`` ---- .. _config-changelog-environment-newline_sequence: ``newline_sequence`` '''''''''''''''''''' **Type:** ``Literal["\n", "\r", "\r\n"]`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"\n"`` ---- .. _config-changelog-environment-trim_blocks: ``trim_blocks`` ''''''''''''''' **Type:** ``bool`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``false`` ---- .. _config-changelog-environment-variable_start_string: ``variable_start_string`` ''''''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. 
**Default:** ``"{{"`` ---- .. _config-changelog-environment-variable_end_string: ``variable_end_string`` ''''''''''''''''''''''' **Type:** ``str`` This setting is passed directly to the `jinja2.Environment`_ constructor. **Default:** ``"}}"`` ---- .. _config-changelog-exclude_commit_patterns: ``exclude_commit_patterns`` *************************** **Type:** ``list[str]`` Any patterns specified here will be excluded from the commits which are available to your changelog. This allows, for example, automated commits to be removed if desired. Python Semantic Release also removes its own commits from the Changelog via this mechanism; therefore if you change the automated commit message that Python Semantic Release uses when making commits, you may wish to add the *old* commit message pattern here. The patterns in this list are treated as regular expressions. **Default:** ``[]`` ---- .. _config-changelog-mode: ``mode`` ******** *Introduced in v9.10.0* **Type:** ``Literal["init", "update"]`` This setting is a flag that is ultimately passed into the changelog context environment. It sets the value of ``context.changelog_mode`` to a string value of either ``init`` or ``update``. When used with the provided changelog template, it will determine the behavior of how the changelog is written. When the mode is set to ``init``, the changelog file will be written from scratch, overwriting any existing changelog file. This is the ``v8`` and ``v9`` default behavior. When the mode is set to ``update``, the changelog file will look for the ``insertion_flag`` value in the changelog file (defined by :ref:`config-changelog-changelog_file`) and insert the new version information at that location. If you are using a custom template directory, the `context.changelog_mode` value will exist in the changelog context but it is up to your implementation to determine if and/or how to use it. **Default:** ``init`` .. seealso:: - :ref:`changelog-templates-default_changelog` ---- .. 
_config-changelog-insertion_flag: ``insertion_flag`` ****************** *Introduced in v9.10.0* **Type:** ``str`` A string that will be used to identify where the new version should be inserted into the changelog file (as defined by :ref:`config-changelog-changelog_file`) when the changelog mode is set to ``update``. If you modify this value in your config, you will need to manually update any saved changelog file to match the new insertion flag if you use the ``update`` mode. In ``init`` mode, the changelog file will be overwritten as normal. In v9.11.0, the ``insertion_flag`` default value became more dynamic with the introduction of an reStructuredText template. The default value will be set depending on the :ref:`config-changelog-default_templates-output_format` setting. The default flag values are: ================== ========================= Output Format Default Insertion Flag ================== ========================= Markdown (``md``) ```` reStructuredText ``..\n version list`` ================== ========================= **Default:** various, see above ---- .. _config-changelog-template_dir: ``template_dir`` **************** **Type:** ``str`` When files exist within the specified directory, they will be used as templates for the changelog rendering process. Regardless if the directory includes a changelog file, the provided directory will be rendered and files placed relative to the root of the project directory. No default changelog template or release notes template will be used when this directory exists and the directory is not empty. If the directory is empty, the default changelog template will be used. This option is discussed in more detail at :ref:`changelog-templates` **Default:** ``"templates"`` ---- .. _config-commit_author: ``commit_author`` """"""""""""""""" **Type:** ``str`` Author used in commits in the format ``name ``. .. note:: If you are using the built-in GitHub Action, the default value is set to ``github-actions ``. 
You can modify this with the ``git_committer_name`` and ``git_committer_email`` inputs. .. seealso:: - :ref:`gh_actions` **Default:** ``semantic-release `` ---- .. _config-commit_message: ``commit_message`` """""""""""""""""" **Type:** ``str`` Commit message to use when making release commits. The message can use ``{version}`` as a format key, in which case the version being released will be formatted into the message. If at some point in your project's lifetime you change this, you may wish to consider, adding the old message pattern(s) to :ref:`exclude_commit_patterns `. **Default:** ``"{version}\n\nAutomatically generated by python-semantic-release"`` ---- .. _config-commit_parser: ``commit_parser`` """"""""""""""""" **Type:** ``str`` Specify which commit parser Python Semantic Release should use to parse the commits within the Git repository. Built-in parsers: * ``angular`` - :ref:`AngularCommitParser ` *(deprecated in v9.19.0)* * ``conventional`` - :ref:`ConventionalCommitParser ` *(available in v9.19.0+)* * ``emoji`` - :ref:`EmojiCommitParser ` * ``scipy`` - :ref:`ScipyCommitParser ` * ``tag`` - :ref:`TagCommitParser ` *(deprecated in v9.12.0)* You can set any of the built-in parsers by their keyword but you can also specify your own commit parser in ``path/to/module_file.py:Class`` or ``module:Class`` form. For more information see :ref:`commit-parsing`. **Default:** ``"conventional"`` ---- .. _config-commit_parser_options: ``commit_parser_options`` """"""""""""""""""""""""" **Type:** ``dict[str, Any]`` This set of options are passed directly to the commit parser class specified in :ref:`the commit parser ` configuration option. For more information (to include defaults), see :ref:`commit_parser-builtin-customization`. **Default:** ``ParserOptions { ... }``, where ``...`` depends on :ref:`commit_parser `. ---- .. 
_config-logging_use_named_masks: ``logging_use_named_masks`` """"""""""""""""""""""""""" **Type:** ``bool`` Whether or not to replace secrets identified in logging messages with named masks identifying which secrets were replaced, or use a generic string to mask them. **Default:** ``false`` ---- .. _config-major_on_zero: ``major_on_zero`` """"""""""""""""" **Type:** ``bool`` This flag controls whether or not Python Semantic Release will increment the major version upon a breaking change when the version matches ``0.y.z``. This value is set to ``true`` by default, where breaking changes will increment the ``0`` major version to ``1.0.0`` like normally expected. If set to ``false``, major (breaking) releases will increment the minor digit of the version while the major version is ``0``, instead of the major digit. This allows for continued breaking changes to be made while the major version remains ``0``. From the `Semantic Versioning Specification`_: Major version zero (0.y.z) is for initial development. Anything MAY change at any time. The public API SHOULD NOT be considered stable. .. _Semantic Versioning Specification: https://semver.org/spec/v2.0.0.html#spec-item-4 When you are ready to release a stable version, set ``major_on_zero`` to ``true`` and run Python Semantic Release again. This will increment the major version to ``1.0.0``. When :ref:`config-allow_zero_version` is set to ``false``, this setting is ignored. **Default:** ``true`` ---- .. _config-no_git_verify: ``no_git_verify`` """"""""""""""""" *Introduced in v9.8.0* **Type:** ``bool`` This flag is passed along to ``git`` upon performing a ``git commit`` during :ref:`cmd-version`. When true, it will bypass any git hooks that are set for the repository when Python Semantic Release makes a version commit. When false, the commit is performed as normal. This option has no effect when there are not any git hooks configured nor when the ``--no-commit`` option is passed. **Default:** ``false`` ---- .. 
_config-publish: ``publish`` """"""""""" This section defines configuration options that modify :ref:`cmd-publish`. .. note:: **pyproject.toml:** ``[tool.semantic_release.publish]`` **releaserc.toml:** ``[semantic_release.publish]`` **releaserc.json:** ``{ "semantic_release": { "publish": {} } }`` ---- .. _config-publish-dist_glob_patterns: ``dist_glob_patterns`` ********************** **Type:** ``list[str]`` Upload any files matching any of these globs to your VCS release. Each item in this list should be a string containing a Unix-style glob pattern. **Default:** ``["dist/*"]`` ---- .. _config-publish-upload_to_vcs_release: ``upload_to_vcs_release`` ************************* **Type:** ``bool`` If set to ``true``, upload any artifacts matched by the :ref:`dist_glob_patterns ` to the release created in the remote VCS corresponding to the latest tag. Artifacts are only uploaded if release artifact uploads are supported by the :ref:`VCS type `. **Default:** ``true`` ---- .. _config-remote: ``remote`` """""""""" The remote configuration is a group of settings that configure PSR's integration with remote version control systems. .. note:: **pyproject.toml:** ``[tool.semantic_release.remote]`` **releaserc.toml:** ``[semantic_release.remote]`` **releaserc.json:** ``{ "semantic_release": { "remote": {} } }`` ---- .. _config-remote-api_domain: ``api_domain`` ************** **Type:** ``Optional[str | Dict['env', str]]`` The hosting domain for the API of your remote HVCS if different than the ``domain``. Generally, this will be used to specify a separate subdomain that is used for API calls rather than the primary domain (ex. ``api.github.com``). **Most on-premise HVCS installations will NOT use this setting!** Whether or not this value is used depends on the HVCS configured (and your server administration) in the :ref:`remote.type ` setting and used in tandem with the :ref:`remote.domain ` setting. 
When using a custom :ref:`remote.domain ` and a HVCS :ref:`remote.type ` that is configured with a separate domain or sub-domain for API requests, this value is used to configure the location of API requests that are sent from PSR. Most on-premise or self-hosted HVCS environments will use a path prefix to handle inbound API requests, which means this value will ignored. PSR knows the expected api domains for known cloud services and their associated api domains which means this value is not necessary to explicitly define for services as ``bitbucket.org``, and ``github.com``. Including the protocol schemes, such as ``https://``, for the API domain is optional. Secure ``HTTPS`` connections are assumed unless the setting of :ref:`remote.insecure ` is ``true``. **Default:** ``None`` ---- .. _config-remote-domain: ``domain`` ********** **Type:** ``Optional[str | Dict['env', str]]`` The host domain for your HVCS server. This setting is used to support on-premise installations of HVCS providers with custom domain hosts. If you are using the official domain of the associated :ref:`remote.type `, this value is not required. PSR will use the default domain value for the :ref:`remote.type ` when not specified. For example, when ``remote.type="github"`` is specified the default domain of ``github.com`` is used. Including the protocol schemes, such as ``https://``, for the domain value is optional. Secure ``HTTPS`` connections are assumed unless the setting of :ref:`remote.insecure ` is ``true``. This setting also supports reading from an environment variable for ease-of-use in CI pipelines. See :ref:`Environment Variable ` for more information. Depending on the :ref:`remote.type `, the default environment variable for the default domain's CI pipeline environment will automatically be checked so this value is not required in default environments. 
For example, when ``remote.type="gitlab"`` is specified, PSR will look to the ``CI_SERVER_URL`` environment variable when ``remote.domain`` is not specified. **Default:** ``None`` .. seealso:: - :ref:`remote.api_domain ` ---- .. _config-remote-ignore_token_for_push: ``ignore_token_for_push`` ************************* **Type:** ``bool`` If set to ``true``, ignore the authentication token when pushing changes to the remote. This is ideal, for example, if you already have SSH keys set up which can be used for pushing. **Default:** ``false`` ---- .. _config-remote-insecure: ``insecure`` ************ *Introduced in v9.4.2* **Type:** ``bool`` Insecure is used to allow non-secure ``HTTP`` connections to your HVCS server. If set to ``true``, any domain value passed will assume ``http://`` if it is not specified and allow it. When set to ``false`` (implicitly or explicitly), it will force ``https://`` communications. When a custom ``domain`` or ``api_domain`` is provided as a configuration, this flag governs the protocol scheme used for those connections. If the protocol scheme is not provided in the field value, then this ``insecure`` option defines whether ``HTTP`` or ``HTTPS`` is used for the connection. If the protocol scheme is provided in the field value, it must match this setting or it will throw an error. The purpose of this flag is to prevent any typos in provided ``domain`` and ``api_domain`` values that accidentally specify an insecure connection but allow users to toggle the protection scheme off when desired. **Default:** ``false`` ---- .. _config-remote-name: ``name`` ******** **Type:** ``str`` Name of the remote to push to using ``git push -u $name `` **Default:** ``"origin"`` ---- .. _config-remote-token: ``token`` ********* **Type:** ``Optional[str | Dict['env', str]]`` :ref:`Environment Variable ` from which to source the authentication token for the remote VCS. 
Common examples include ``"GH_TOKEN"``, ``"GITLAB_TOKEN"`` or ``"GITEA_TOKEN"``, however, you may choose to use a custom environment variable if you wish. .. note:: By default, this is a **mandatory** environment variable that must be set before using any functionality that requires authentication with your remote VCS. If you are using this token to enable push access to the repository, it must also be set before attempting to push. If your push access is enabled via SSH keys instead, then you do not need to set this environment variable in order to push the version increment, changelog and modified source code assets to the remote using :ref:`cmd-version`. However, you will need to disable release creation using the :ref:`cmd-version-option-vcs-release` option, among other options, in order to use Python Semantic Release without configuring the environment variable for your remote VCS authentication token. The default value for this setting depends on what you specify as :ref:`remote.type `. Review the table below to see what the default token value will be for each remote type. ================ == =============================== ``remote.type`` Default ``remote.token`` ================ == =============================== ``"github"`` -> ``{ env = "GH_TOKEN" }`` ``"gitlab"`` -> ``{ env = "GITLAB_TOKEN" }`` ``"gitea"`` -> ``{ env = "GITEA_TOKEN" }`` ``"bitbucket"`` -> ``{ env = "BITBUCKET_TOKEN" }`` ================ == =============================== **Default:** ``{ env = "" }``, where ```` depends on :ref:`remote.type ` as indicated above. ---- .. _config-remote-type: ``type`` ******** **Type:** ``Literal["bitbucket", "gitea", "github", "gitlab"]`` The type of the remote VCS. Currently, Python Semantic Release supports ``"github"``, ``"gitlab"``, ``"gitea"`` and ``"bitbucket"``. Not all functionality is available with all remote types, but we welcome pull requests to help improve this! **Default:** ``"github"`` ---- .. 
_config-remote-url: ``url`` ******* **Type:** ``Optional[str | Dict['env', str]]`` An override setting used to specify the remote upstream location of ``git push``. **Not commonly used!** This is used to override the derived upstream location when the desired push location is different than the location the repository was cloned from. This setting will override the upstream location url that would normally be derived from the :ref:`remote.name ` location of your git repository. **Default:** ``None`` ---- .. _config-tag_format: ``tag_format`` """""""""""""" **Type:** ``str`` Specify the format to be used for the Git tag that will be added to the repo during a release invoked via :ref:`cmd-version`. The format string is a regular expression, which also must include the format keys below, otherwise an exception will be thrown. It *may* include any of the optional format keys, in which case the contents described will be formatted into the specified location in the Git tag that is created. For example, ``"(dev|stg|prod)-v{version}"`` is a valid ``tag_format`` matching tags such as: - ``dev-v1.2.3`` - ``stg-v0.1.0-rc.1`` - ``prod-v2.0.0+20230701`` This format will also be used for parsing tags already present in the repository into semantic versions; therefore if the tag format changes at some point in the repository's history, historic versions that no longer match this pattern will not be considered as versions. ================ ========= ========================================================== Format Key Mandatory Contents ================ ========= ========================================================== ``{version}`` Yes The new semantic version number, for example ``1.2.3``, or ``2.1.0-alpha.1+build.1234`` ================ ========= ========================================================== Tags which do not match this format will not be considered as versions of your project. **Default:** ``"v{version}"`` ---- .. 
_config-version_toml: ``version_toml`` """""""""""""""" **Type:** ``list[str]`` This configuration option is similar to :ref:`config-version_variables`, but it uses a TOML parser to interpret the data structure before inserting the version. This allows users to use dot-notation to specify the version via the logical structure within the TOML file, which is more accurate than a pattern replace. The ``version_toml`` option is commonly used to update the version number in the project definition file: ``pyproject.toml`` as seen in the example below. As of v9.20.0, the ``version_toml`` option accepts a colon-separated definition with either 2 or 3 parts. The 2-part definition includes the file path and the version parameter (in dot-notation). Newly with v9.20.0, it also accepts an optional 3rd part to allow configuration of the format type. **Available Format Types** - ``nf``: Number format (ex. ``1.2.3``) - ``tf``: :ref:`Tag Format ` (ex. ``v1.2.3``) If the format type is not specified, it will default to the number format. **Example** .. code-block:: toml [semantic_release] version_toml = [ # "file:variable:[format_type]" "pyproject.toml:tool.poetry.version", # Implied Default: Number format "definition.toml:project.version:nf", # Number format "definition.toml:project.release:tf", # Tag format ] This configuration will result in the following changes: .. code-block:: diff diff a/pyproject.toml b/pyproject.toml [tool.poetry] - version = "0.1.0" + version = "0.2.0" .. code-block:: diff diff a/definition.toml b/definition.toml [project] name = "example" - version = "0.1.0" + version = "0.2.0" - release = "v0.1.0" + release = "v0.2.0" **Default:** ``[]`` ---- .. _config-version_variables: ``version_variables`` """"""""""""""""""""" **Type:** ``list[str]`` The ``version_variables`` configuration option is a list of string definitions that defines where the version number should be updated in the repository, when a new version is released. 
As of v9.20.0, the ``version_variables`` option accepts a colon-separated definition with either 2 or 3 parts. The 2-part definition includes the file path and the variable name. Newly with v9.20.0, it also accepts an optional 3rd part to allow configuration of the format type. **Available Format Types** - ``nf``: Number format (ex. ``1.2.3``) - ``tf``: :ref:`Tag Format ` (ex. ``v1.2.3``) If the format type is not specified, it will default to the number format. Prior to v9.20.0, PSR only supported entries with the first 2-parts as the tag format type was not available and would only replace numeric version numbers. **Example** .. code-block:: toml [semantic_release] tag_format = "v{version}" version_variables = [ # "file:variable:format_type" "src/semantic_release/__init__.py:__version__", # Implied Default: Number format "docs/conf.py:version:nf", # Number format for sphinx docs "kustomization.yml:newTag:tf", # Tag format ] First, the ``__version__`` variable in ``src/semantic_release/__init__.py`` will be updated with the next version using the `SemVer`_ number format. .. code-block:: diff diff a/src/semantic_release/__init__.py b/src/semantic_release/__init__.py - __version__ = "0.1.0" + __version__ = "0.2.0" Then, the ``version`` variable in ``docs/conf.py`` will be updated with the next version using the `SemVer`_ number format because of the explicit ``nf``. .. code-block:: diff diff a/docs/conf.py b/docs/conf.py - version = "0.1.0" + version = "0.2.0" Lastly, the ``newTag`` variable in ``kustomization.yml`` will be updated with the next version using the configured :ref:`config-tag_format` because the definition included ``tf``. .. code-block:: diff diff a/kustomization.yml b/kustomization.yml images: - name: repo/image - newTag: v0.1.0 + newTag: v0.2.0 **How It Works** Each version variable will be transformed into a Regular Expression that will be used to substitute the version number in the file. 
The replacement algorithm is **ONLY** a pattern match and replace. It will **NOT** evaluate the code nor will PSR understand any internal object structures (ie. ``file:object.version`` will not work). The regular expression generated from the ``version_variables`` definition will: 1. Look for the specified ``variable`` name in the ``file``. The variable name can be enclosed by single (``'``) or double (``"``) quotation marks but they must match. 2. The variable name defined by ``variable`` and the version must be separated by an operand symbol (``=``, ``:``, ``:=``, or ``@``). Whitespace is optional around the symbol. 3. The value of the variable must match a `SemVer`_ regular expression and can be enclosed by single (``'``) or double (``"``) quotation marks but they must match. However, the enclosing quotes of the value do not have to match the quotes surrounding the variable name. 4. If the format type is set to ``tf`` then the variable value must have the matching prefix and suffix of the :ref:`config-tag_format` setting around the `SemVer`_ version number. Given the pattern matching nature of this feature, the Regular Expression is able to support most file formats because of the similarity of variable declaration across programming languages. PSR specifically supports Python, YAML, and JSON as these have been the most commonly requested formats. This configuration option will also work regardless of file extension because it looks for a matching pattern string. .. note:: This will also work for TOML but we recommend using :ref:`config-version_toml` for TOML files as it actually will interpret the TOML file and replace the version number before writing the file back to disk. This is a comprehensive list (but not all variations) of examples where the following versions will be matched and replaced by the new version: .. 
code-block:: # Common variable declaration formats version='1.2.3' version = "1.2.3" release = "v1.2.3" # if tag_format is set # YAML version: 1.2.3 # JSON "version": "1.2.3" # NPM & GitHub Actions YAML version@1.2.3 version@v1.2.3 # if tag_format is set # Walrus Operator version := "1.2.3" # Excessive whitespace version = '1.2.3' # Mixed Quotes "version" = '1.2.3' # Custom Tag Format with tag_format set (monorepos) __release__ = "module-v1.2.3" .. important:: The Regular Expression expects a version value to exist in the file to be replaced. It cannot be an empty string or a non-semver compliant string. If this is the very first time you are using PSR, we recommend you set the version to ``0.0.0``. This may become more flexible in the future with resolution of issue `#941`_. .. _#941: https://github.com/python-semantic-release/python-semantic-release/issues/941 .. warning:: If the file (ex. JSON) you are replacing has two of the same variable name in it, this pattern match will not be able to differentiate between the two and will replace both. This is a limitation of the pattern matching and not a bug. **Default:** ``[]`` .. _SemVer: https://semver.org/ python-semantic-release-9.21.0/docs/contributing.rst000066400000000000000000000000411475670435200225440ustar00rootroot00000000000000.. include:: ../CONTRIBUTING.rst python-semantic-release-9.21.0/docs/contributors.rst000066400000000000000000000000341475670435200225740ustar00rootroot00000000000000.. include:: ../AUTHORS.rst python-semantic-release-9.21.0/docs/index.rst000066400000000000000000000231651475670435200211600ustar00rootroot00000000000000Python Semantic Release *********************** |Ruff| |Test Status| |PyPI Version| |conda-forge version| |Read the Docs Status| |Pre-Commit Enabled| Automatic Semantic Versioning for Python projects. This is a Python implementation of `semantic-release`_ for JS by Stephan Bönnemann. If you find this topic interesting you should check out his `talk from JSConf Budapest`_. 
The general idea is to be able to detect what the next version of the project should be based on the commits. This tool will use that to automate the whole release, upload to an artifact repository and post changelogs to GitHub. You can run the tool on a CI service, or just run it locally. Installation ============ :: python3 -m pip install python-semantic-release semantic-release --help Python Semantic Release is also available from `conda-forge`_ or as a `GitHub Action`_. Read more about the setup and configuration in our `getting started guide`_. .. _semantic-release: https://github.com/semantic-release/semantic-release .. _talk from JSConf Budapest: https://www.youtube.com/watch?v=tc2UgG5L7WM .. _getting started guide: https://python-semantic-release.readthedocs.io/en/latest/#getting-started .. _GitHub Action: https://python-semantic-release.readthedocs.io/en/latest/automatic-releases/github-actions.html .. _conda-forge: https://anaconda.org/conda-forge/python-semantic-release .. |Test Status| image:: https://img.shields.io/github/actions/workflow/status/python-semantic-release/python-semantic-release/cicd.yml?branch=master&label=Test%20Status&logo=github :target: https://github.com/python-semantic-release/python-semantic-release/actions/workflows/cicd.yml :alt: test-status .. |PyPI Version| image:: https://img.shields.io/pypi/v/python-semantic-release?label=PyPI&logo=pypi :target: https://pypi.org/project/python-semantic-release/ :alt: pypi .. |conda-forge Version| image:: https://img.shields.io/conda/vn/conda-forge/python-semantic-release?logo=anaconda :target: https://anaconda.org/conda-forge/python-semantic-release :alt: conda-forge .. |Read the Docs Status| image:: https://img.shields.io/readthedocs/python-semantic-release?label=Read%20the%20Docs&logo=Read%20the%20Docs :target: https://python-semantic-release.readthedocs.io/en/latest/ :alt: docs .. 
|Pre-Commit Enabled| image:: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit :target: https://github.com/pre-commit/pre-commit :alt: pre-commit .. |Ruff| image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json :target: https://github.com/astral-sh/ruff :alt: Ruff Documentation Contents ====================== .. toctree:: :maxdepth: 1 commands Strict Mode configuration commit_parsing Changelog Templates Multibranch Releases automatic-releases/index troubleshooting contributing contributors Migrating from Python Semantic Release v7 Internal API Algorithm Changelog View on GitHub Getting Started =============== If you haven't done so already, install Python Semantic Release following the instructions above. There is no strict requirement to have it installed locally if you intend on :ref:`using a CI service `, however running with :ref:`cmd-main-option-noop` can be useful to test your configuration. Generating your configuration ----------------------------- Python Semantic Release ships with a command-line interface, ``semantic-release``. You can inspect the default configuration in your terminal by running ``semantic-release generate-config`` You can also use the :ref:`-f/--format ` option to specify what format you would like this configuration to be. The default is TOML, but JSON can also be used. You can append the configuration to your existing ``pyproject.toml`` file using a standard redirect, for example: ``semantic-release generate-config --pyproject >> pyproject.toml`` and then editing to your project's requirements. .. seealso:: - :ref:`cmd-generate-config` - :ref:`configuration` Setting up version numbering ---------------------------- Create a variable set to the current version number. This could be anywhere in your project, for example ``setup.py``:: from setuptools import setup __version__ = "0.0.0" setup( name="my-package", version=__version__, # And so on... 
) Python Semantic Release can be configured using a TOML or JSON file; the default configuration file is ``pyproject.toml``, if you wish to use another file you will need to use the ``-c/--config`` option to specify the file. Set :ref:`version_variables ` to a list, the only element of which should be the location of your version variable inside any Python file, specified in standard ``module:attribute`` syntax: ``pyproject.toml``:: [tool.semantic_release] version_variables = ["setup.py:__version__"] .. seealso:: - :ref:`configuration` - tailor Python Semantic Release to your project Setting up commit parsing ------------------------- We rely on commit messages to detect when a version bump is needed. By default, Python Semantic Release uses the `Conventional Commits Specification`_ to parse commit messages. You can find out more about this in :ref:`commit-parsing`. .. seealso:: - :ref:`config-branches` - Adding configuration for releases from multiple branches. - :ref:`commit_parser ` - use a different parser for commit messages. For example, Python Semantic Release also ships with emoji and scipy-style parsers. - :ref:`remote.type ` - specify the type of your remote VCS. .. _Conventional Commits Specification: https://www.conventionalcommits.org/en/v1.0.0 Setting up the changelog ------------------------ .. seealso:: - :ref:`Changelog ` - Customize the changelog generated by Python Semantic Release. - :ref:`changelog-templates-migrating-existing-changelog` .. _index-creating-vcs-releases: Creating VCS Releases --------------------- You can set up Python Semantic Release to create Releases in your remote version control system, so you can publish assets and release notes for your project. In order to do so, you will need to place an authentication token in the appropriate environment variable so that Python Semantic Release can authenticate with the remote VCS to push tags, create releases, or upload files. 
GitHub (``GH_TOKEN``) """"""""""""""""""""" For local publishing to GitHub, you should use a personal access token and store it in your environment variables. Specify the name of the environment variable in your configuration setting :ref:`remote.token `. The default is ``GH_TOKEN``. To generate a token go to https://github.com/settings/tokens and click on "Generate new token". For Personal Access Token (classic), you will need the ``repo`` scope to write (ie. push) to the repository. For fine-grained Personal Access Tokens, you will need the `contents`__ permission. __ https://docs.github.com/en/rest/authentication/permissions-required-for-fine-grained-personal-access-tokens#repository-permissions-for-contents GitLab (``GITLAB_TOKEN``) """"""""""""""""""""""""" A personal access token from GitLab. This is used for authenticating when pushing tags, publishing releases etc. This token should be stored in the ``GITLAB_TOKEN`` environment variable. Gitea (``GITEA_TOKEN``) """"""""""""""""""""""" A personal access token from Gitea. This token should be stored in the ``GITEA_TOKEN`` environment variable. Bitbucket (``BITBUCKET_TOKEN``) """"""""""""""""""""""""""""""" Bitbucket does not support uploading releases but can still benefit from automated tags and changelogs. The user has three options to push changes to the repository: #. Use SSH keys. #. Use an `App Secret`_, store the secret in the ``BITBUCKET_TOKEN`` environment variable and the username in ``BITBUCKET_USER``. #. Use an `Access Token`_ for the repository and store it in the ``BITBUCKET_TOKEN`` environment variable. .. _App Secret: https://support.atlassian.com/bitbucket-cloud/docs/push-back-to-your-repository/#App-secret .. _Access Token: https://support.atlassian.com/bitbucket-cloud/docs/repository-access-tokens .. seealso:: - :ref:`Changelog ` - customize your project's changelog. 
- :ref:`changelog-templates-custom_release_notes` - customize the published release notes - :ref:`upload_to_vcs_release ` - enable/disable uploading artifacts to VCS releases - :ref:`version --vcs-release/--no-vcs-release ` - enable/disable VCS release creation. - `upload-to-gh-release`_, a GitHub Action for running ``semantic-release publish`` .. _upload-to-gh-release: https://github.com/python-semantic-release/upload-to-gh-release .. _running-from-setuppy: Running from setup.py --------------------- Add the following hook to your ``setup.py`` and you will be able to run ``python setup.py `` as you would ``semantic-release ``:: try: from semantic_release import setup_hook setup_hook(sys.argv) except ImportError: pass .. note:: Only the :ref:`version `, :ref:`publish `, and :ref:`changelog ` commands may be invoked from setup.py in this way. Running on CI ------------- Getting a fully automated setup with releases from CI can be helpful for some projects. See :ref:`automatic`. python-semantic-release-9.21.0/docs/make.bat000066400000000000000000000151171475670435200207220ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. 
epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. 
goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\python-semantic-release.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\python-semantic-release.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. 
goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end python-semantic-release-9.21.0/docs/migrating_from_v7.rst000066400000000000000000000513041475670435200234650ustar00rootroot00000000000000.. _migrating-from-v7: Migrating from Python Semantic Release v7 ========================================= Python Semantic Release 8.0.0 introduced a number of breaking changes. The internals have been changed significantly to better support highly-requested features and to streamline the maintenance of the project. 
As a result, certain things have been removed, reimplemented differently, or now exhibit different behavior to earlier versions of Python Semantic Release. This page is a guide to help projects to ``pip install python-semantic-release>=8.0.0`` with fewer surprises. .. _breaking-github-action: Python Semantic Release GitHub Action ------------------------------------- .. _breaking-removed-artefact-upload: GitHub Action no longer publishes artifacts to PyPI or GitHub Releases """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" Python Semantic Release no longer uploads distributions to PyPI - see :ref:`breaking-commands-repurposed-version-and-publish`. If you are using Python Semantic Release to publish release notes and artifacts to GitHub releases, there is a new GitHub Action `upload-to-gh-release`_ which will perform this action for you. This means the following workflows perform the same actions, and if you are using the former, you will need to modify your workflow to include the steps in the latter. This workflow is written to use Python Semantic Release v7.33.5: .. code:: yaml --- name: Semantic Release on: push: branches: - main jobs: release: runs-on: ubuntu-latest concurrency: release steps: - uses: actions/checkout@v3 with: fetch-depth: 0 # This action uses Python Semantic Release v7 - name: Python Semantic Release uses: python-semantic-release/python-semantic-release@v7.33.5 with: github_token: ${{ secrets.GITHUB_TOKEN }} repository_username: __token__ repository_password: ${{ secrets.PYPI_TOKEN }} The following workflow achieves the same result using Python Semantic Release v8, the `upload-to-gh-release`_ GitHub Action, and the `pypa/gh-action-pypi-publish`_ GitHub Action: .. 
code:: yaml --- name: Semantic Release on: push: branches: - main jobs: release: runs-on: ubuntu-latest concurrency: release permissions: id-token: write steps: - uses: actions/checkout@v3 with: fetch-depth: 0 # This action uses Python Semantic Release v8 - name: Python Semantic Release id: release uses: python-semantic-release/python-semantic-release@v8.7.0 with: github_token: ${{ secrets.GITHUB_TOKEN }} - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@v1 # NOTE: DO NOT wrap the conditional in ${{ }} as it will always evaluate to true. # See https://github.com/actions/runner/issues/1173 if: steps.release.outputs.released == 'true' - name: Publish package distributions to GitHub Releases uses: python-semantic-release/upload-to-gh-release@v8.7.0 if: steps.release.outputs.released == 'true' with: github_token: ${{ secrets.GITHUB_TOKEN }} .. _upload-to-gh-release: https://github.com/python-semantic-release/upload-to-gh-release .. _pypa/gh-action-pypi-publish: https://github.com/pypa/gh-action-pypi-publish .. _breaking-github-action-removed-pypi-token: Removal of ``pypi_token``, ``repository_username`` and ``repository_password`` inputs """"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" Since the library no longer supports publishing to PyPI, the ``pypi_token``, ``repository_username`` and ``repository_password`` inputs of the GitHub action have all been removed. See the above section for how to publish to PyPI using the official GitHub Action from the Python Packaging Authority (PyPA). .. _breaking-options-inputs: Rename ``additional_options`` to ``root_options`` """"""""""""""""""""""""""""""""""""""""""""""""" Because the purposes of the :ref:`cmd-version` and :ref:`cmd-publish` commands have changed, the GitHub action now performs both commands in sequence. 
For this reason, and because the usage of the CLI has changed, ``additional_options`` has been renamed to ``root_options`` to reflect the fact that the options are for the main :ref:`cmd-main` command group. .. _breaking-commands: Commands -------- .. _breaking-commands-repurposed-version-and-publish: Repurposing of ``version`` and ``publish`` commands """"""""""""""""""""""""""""""""""""""""""""""""""" Python Semantic Release's primary purpose is to enable automation of correct semantic versioning for software projects. Over the years, this automation has been extended to include other actions such as building/publishing the project and its artifacts to artefact repositories, creating releases in remote version control systems, and writing changelogs. In Python Semantic Release <8.0.0, the ``publish`` command was a one-stop-shop for performing every piece of automation provided. This has been changed - the ``version`` command now handles determining the next version, applying the changes to the project metadata according to the configuration, writing a changelog, and committing/pushing changes to the remote Git repository. It also handles creating a release in the remote VCS. It does *not* publish software artifacts to remote repositories such as PyPI; the rationale behind this decision is simply that under the hood, Python Semantic Release used `twine`_ to upload artifacts to package indexes such as PyPI, and it's recommended to use twine directly via the command-line. From the twine `documentation `_: Twine is a command-line tool for interacting with PyPI securely over HTTPS. As a result Python Semantic Release no longer depends on twine internals. The ``publish`` command now handles publishing software artifacts to releases in the remote version control system. .. _twine: https://twine.readthedocs.io/en/stable .. _twine upload: https://twine.readthedocs.io/en/stable/#twine-upload To achieve a similar flow of logic such as 1. Determine the next version 2. 
Write this version to the configured metadata locations 3. Write the changelog 4. Push the changes to the metadata and changelog to the remote repository 5. Create a release in the remote version control system 6. Build a wheel 7. Publish the wheel to PyPI 8. Publish the distribution artifacts to the release in the remote VCS You should run:: semantic-release version twine upload dist/* # or whichever path your distributions are placed in semantic-release publish With steps 1-6 being handled by the :ref:`cmd-version` command, step 7 being left to the developer to handle, and lastly step 8 to be handled by the :ref:`cmd-publish` command. .. _breaking-removed-define-option: Removal of ``-D/--define`` command-line option """""""""""""""""""""""""""""""""""""""""""""" It is no longer possible to override arbitrary configuration values using the ``-D``/ ``--define`` option. You should provide the appropriate values via a configuration file using :ref:`cmd-main-option-config` or via the available command-line options. This simplifies the command-line option parsing significantly and is less error-prone, which has resulted in previous issues (e.g. `#600`_) with overrides on the command-line. Some of the configuration values expected by Python Semantic Release use complex data types such as lists or nested structures, which would be tedious and error-prone to specify using just command-line options. .. _#600: https://github.com/python-semantic-release/python-semantic-release/issues/600 .. _breaking-commands-no-verify-ci: Removal of CI verifications """"""""""""""""""""""""""" Prior to v8, Python Semantic Release would perform some prerequisite verification of environment variables before performing any version changes using the ``publish`` command. It's not feasible for Python Semantic Release to verify any possible CI environment fully, and these checks were only triggered if certain environment variables were set - they wouldn't fail locally. 
These checks previously raised :py:class:``semantic_release.CiVerificationError``, and were the only place in which this custom exception was used. Therefore, this exception has **also** been removed from Python Semantic Release in v8. If you were relying on this functionality, it's recommended that you add the following shell commands *before* invoking ``semantic-release`` to verify your environment: .. note:: In the following, $RELEASE_BRANCH refers to the git branch against which you run your releases using Python Semantic Release. You will need to ensure it is set properly (e.g. via ``export RELEASE_BRANCH=main`` and/or replace the variable with the branch name you want to verify the CI environment for. .. _breaking-commands-no-verify-ci-travis: Travis ~~~~~~ **Condition**: environment variable ``TRAVIS=true`` **Replacement**: .. code-block:: bash if ! [[ $TRAVIS_BRANCH == $RELEASE_BRANCH && \ $TRAVIS_PULL_REQUEST == 'false' ]]; then exit 1 fi .. _breaking-commands-no-verify-ci-semaphore: Semaphore ~~~~~~~~~ **Condition**: environment variable ``SEMAPHORE=true`` **Replacement**: .. code-block:: bash if ! [[ $BRANCH_NAME == $RELEASE_BRANCH && \ $SEMAPHORE_THREAD_RESULT != 'failed' && \ -n $PULL_REQUEST_NUMBER ]]; then exit 1 fi .. _breaking-commands-no-verify-ci-frigg: Frigg ~~~~~ **Condition**: environment variable ``FRIGG=true`` **Replacement**: .. code-block:: bash if ! [[ $FRIGG_BUILD_BRANCH == $RELEASE_BRANCH && \ -n $FRIGG_PULL_REQUEST ]]; then exit 1 fi .. _breaking-commands-no-verify-ci-circle-ci: Circle CI ~~~~~~~~~ **Condition**: environment variable ``CIRCLECI=true`` **Replacement**: .. code-block:: bash if ! [[ $CIRCLE_BRANCH == $RELEASE_BRANCH && \ -n $CI_PULL_REQUEST ]]; then exit 1 fi .. _breaking-commands-no-verify-ci-gitlab-ci: GitLab CI ~~~~~~~~~ **Condition**: environment variable ``GITLAB_CI=true`` **Replacement**: .. code-block:: bash if ! [[ $CI_COMMIT_REF_NAME == $RELEASE_BRANCH ]]; then exit 1 fi .. 
_breaking-commands-no-verify-ci-bitbucket: **Condition**: environment variable ``BITBUCKET_BUILD_NUMBER`` is set **Replacement**: .. code-block:: bash if ! [[ $BITBUCKET_BRANCH == $RELEASE_BRANCH && \ -n $BITBUCKET_PR_ID ]]; then exit 1 fi .. _breaking-commands-no-verify-ci-jenkins: Jenkins ~~~~~~~ **Condition**: environment variable ``JENKINS_URL`` is set **Replacement**: .. code-block:: bash if [[ -n $BRANCH_NAME ]]; then BRANCH_NAME=$BRANCH_NAME elif [[ -n $GIT_BRANCH ]]; then BRANCH_NAME=$GIT_BRANCH fi if ! [[ $BRANCH_NAME == $RELEASE_BRANCH && \ -n $CHANGE_ID ]]; then exit 1 fi .. _breaking-removed-build-status-checking: Removal of Build Status Checking """""""""""""""""""""""""""""""" Prior to v8, Python Semantic Release contained a configuration option, ``check_build_status``, which would attempt to prevent a release being made if it was possible to identify that a corresponding build pipeline was failing. For similar reasons to those motivating the removal of :ref:`CI Checks `, this feature has also been removed. If you are leveraging this feature in Python Semantic Release v7, the following bash commands will replace the functionality, and you can add these to your pipeline. You will need to install ``jq`` and ``curl`` to run these commands; they can be easily installed through your system's package manager, for example on Ubuntu: .. code-block:: bash sudo apt update && sudo apt upgrade sudo apt install -y curl jq On Windows, you can refer to the `installation guide for jq`_, and if ``curl`` is not already installed, you can download it from `the curl website`_ .. _installation guide for jq: https://jqlang.github.io/jq/download/ .. _the curl website: https://curl.se/ .. _breaking-removed-build-status-checking-github: GitHub ~~~~~~ .. 
code-block:: bash export RESP="$( curl \ -H "Authorization: token $GITHUB_TOKEN" \ -fSsL https://$GITHUB_API_DOMAIN/repos/$REPO_OWNER/$REPO_NAME/commits/$(git rev-parse HEAD)/status || exit 1 )" if [ $(jq -r '.state' <<< "$RESP") != "success" ]; then echo "Build status is not success" >&2 exit 1 fi Note that ``$GITHUB_API_DOMAIN`` is typically ``api.github.com`` unless you are using GitHub Enterprise with a custom domain name. .. _breaking-removed-build-status-checking-gitea: Gitea ~~~~~ .. code-block:: bash export RESP="$( curl \ -H "Authorization: token $GITEA_TOKEN" \ -fSsL https://$GITEA_DOMAIN/repos/$REPO_OWNER/$REPO_NAME/statuses/$(git rev-parse HEAD) || exit 1 )" if [ $(jq -r '.state' <<< "$RESP") != "success" ]; then echo "Build status is not success" >&2 exit 1 fi .. _breaking-removed-build-status-checking-gitlab: Gitlab ~~~~~~ .. code-block:: bash export RESP="$( curl \ -H "Authorization: token $GITLAB_TOKEN" \ -fSsL https://$GITLAB_DOMAIN/api/v4/projects/$PROJECT_ID/repository/commits/$(git rev-parse HEAD)/statuses )" for line in $(jq -r '.[] | [.name, .status, .allow_failure] | join("|")' <<<"$RESP"); do IFS="|" read -r job_name job_status allow_failure <<<"$line" if [ "$job_status" == "pending" ]; then echo "job $job_name is pending" >&2 exit 1 elif [ "$job_status" == "failed" ] && [ ! "$allow_failure" == "true" ]; then echo "job $job_name failed" >&2 exit 1 fi done .. _breaking-commands-multibranch-releases: Multibranch releases """""""""""""""""""" Prior to v8, Python Semantic Release would perform ``git checkout`` to switch to your configured release branch and determine if a release would need to be made. In v8 this has been changed - you must manually check out the branch which you would like to release against, and if you would like to create releases against this branch you must also ensure that it belongs to a :ref:`release group `. .. 
_breaking-commands-changelog: ``changelog`` command """"""""""""""""""""" A new option, :ref:`cmd-changelog-option-post-to-release-tag` has been added. If you omit this argument on the command line then the changelog rendering process, which is described in more detail at :ref:`changelog-templates-template-rendering`, will be triggered, but the new changelog will not be posted to any release. If you use this new command-line option, it should be set to a tag within the remote which has a corresponding release. For example, to update the changelog and post it to the release corresponding to the tag ``v1.1.4``, you should run:: semantic-release changelog --post-to-release-tag v1.1.4 .. _breaking-changelog-customization: Changelog customization """"""""""""""""""""""" A number of options relevant to customizing the changelog have been removed. This is because Python Semantic Release now supports authoring a completely custom `Jinja`_ template with the contents of your changelog. Historically, the number of options added to Python Semantic Release in order to allow this customization has grown significantly; it now uses templates in order to fully open up customizing the changelog's appearance. .. _Jinja: https://jinja.palletsprojects.com/en/3.1.x/ .. _breaking-configuration: Configuration ------------- The configuration structure has been completely reworked, so you should read :ref:`configuration` carefully during the process of upgrading to v8+. However, some common pitfalls and potential sources of confusion are summarized here. .. _breaking-configuration-setup-cfg-unsupported: ``setup.cfg`` is no longer supported """""""""""""""""""""""""""""""""""" Python Semantic Release no longer supports configuration via ``setup.cfg``. 
This is because the Python ecosystem is centering around ``pyproject.toml`` as universal tool and project configuration file, and TOML allows expressions via configuration, such as the mechanism for declaring configuration via environment variables, which introduce much greater complexity to support in the otherwise equivalent ``ini``-format configuration. You can use :ref:`cmd-generate-config` to generate new-format configuration that can be added to ``pyproject.toml``, and adjust the default settings according to your needs. .. warning:: If you don't already have a ``pyproject.toml`` configuration file, ``pip`` can change its behavior once you add one, as a result of `PEP-517`_. If you find that this breaks your packaging, you can add your Python Semantic Release configuration to a separate file such as ``semantic-release.toml``, and use the :ref:`--config ` option to reference this alternative configuration file. More detail about this issue can be found in this `pip issue`_. .. _PEP-517: https://peps.python.org/pep-0517/#evolutionary-notes .. _pip issue: https://github.com/pypa/pip/issues/8437#issuecomment-805313362 .. _breaking-commit-parser-options: Commit parser options """"""""""""""""""""" Options such as ``major_emoji``, ``parser_angular_patch_types`` or ``parser_angular_default_level_bump`` have been removed. Instead, these have been replaced with a single set of recognized commit parser options, ``allowed_tags``, ``major_tags``, ``minor_tags``, and ``patch_tags``, though the interpretation of these is up to the specific parsers in use. You can read more detail about using commit parser options in :ref:`commit_parser_options `, and if you need to parse multiple commit styles for a single project it's recommended that you create a parser following :ref:`commit_parser-custom_parser` that is tailored to the specific needs of your project. .. 
_breaking-version-variable-rename: ``version_variable`` """""""""""""""""""" This option has been renamed to :ref:`version_variables ` as it refers to a list of variables which can be updated. .. _breaking-version-pattern-removed: ``version_pattern`` """"""""""""""""""" This option has been removed. It's recommended to use an alternative tool to perform substitution using arbitrary regular expressions, such as ``sed``. You can always use Python Semantic Release to identify the next version to be created for a project and store this in an environment variable like so:: export VERSION=$(semantic-release version --print) .. _breaking-version-toml-type: ``version_toml`` """""""""""""""" This option will no longer accept a string or comma-separated string of version locations to be updated in TOML files. Instead, you must supply a ``List[str]``. For existing configurations using a single location in this option, you can simply wrap the value in ``[]``: .. code-block:: toml # Python Semantic Release v7 configuration [tool.semantic_release] version_toml = "pyproject.toml:tool.poetry.version" # Python Semantic Release v8 configuration [tool.semantic_release] version_toml = ["pyproject.toml:tool.poetry.version"] .. _breaking-tag-format-validation: ``tag_format`` """""""""""""" This option has the same effect as it did in Python Semantic Release prior to v8, but Python Semantic Release will now verify that it has a ``{version}`` format key and raise an error if this is not the case. .. _breaking-upload-to-release-rename: ``upload_to_release`` """"""""""""""""""""" This option has been renamed to :ref:`upload_to_vcs_release `. .. 
_breaking-custom-commit-parsers: Custom Commit Parsers --------------------- Previously, a custom commit parser had to satisfy the following criteria: * It should be ``import``-able from the virtual environment where the ``semantic-release`` is run * It should be a function which accepts the commit message as its only argument and returns a :py:class:`semantic_release.history.parser_helpers.ParsedCommit` if the commit is parsed successfully, or raise a :py:class:`semantic_release.UnknownCommitMessageStyleError` if parsing is unsuccessful. It is still possible to implement custom commit parsers, but the interface for doing so has been modified with stronger support for Python type annotations and broader input provided to the parser to enable capturing more information from each commit, such as the commit's date and author, if desired. A full guide to implementing a custom commit parser can be found at :ref:`commit_parser-custom_parser`. python-semantic-release-9.21.0/docs/multibranch_releases.rst000066400000000000000000000212441475670435200242400ustar00rootroot00000000000000.. _multibranch-releases: Multibranch Releases ==================== Python Semantic Release supports releases from multiple branches within your Git repository. You can elect to have a branch or set of branches create releases or `prereleases`_. There are no restrictions enforced on how you set up your releases, but be aware that if you create new releases from multiple branches, or prereleases from multiple independent branches using the same *prerelease token*, there is a chance that Python Semantic Release will calculate the next version to be the same on more than one branch (leading to an error that a Git tag already exists). .. note:: A "prerelease token" is the string used to suffix onto the 3-digit form of a full semantic version. 
For example, in the version ``1.2.3-beta.1``, the prerelease token is ``"beta"`` Typical strings used for pre-release tokens include "alpha", "beta", "dev" and "rc". These tend to indicate a level of maturity of the software associated with the version, but the specific meaning of each string is up to the project to decide. Generally, it's good practice to maintain a single branch from which full releases are made, and one branch at a time for each type of prerelease (alpha, beta, rc, etc). If you absolutely require tagging and (pre-)releases to take place from multiple branches where there's a risk that tags could conflict between branches, you can use the :ref:`--build-metadata ` command line argument to attach additional information (such as the branch name) to the tag in order to uniquely distinguish it from any other tags that might be calculated against other branches. Such a situation may occur in the following scenario: .. code-block:: O ----------- O <---- feature-1 / "feat: abc" / O -------- O --------------- O <---- main v1.0.0 v1.1.0 \ O ----------- O <---- feature-2 "feat: 123" Suppose that Python Semantic Release has been configured to use the same prerelease token ``"alpha"`` for all ``feature-*`` branches, and the default tag format ``"v{version}"``. In this case, running a pre-release from branch ``feature-1`` will recognize that since the last release, ``1.1.0``, a **feature** has been introduced and therefore the next tag to be applied to ``feature-1`` will be ``v1.2.0-alpha.1``. However, suppose we then try to run a release against ``feature-2``. This will also recognize that a **feature** has been introduced against the last released version of ``v1.1.0`` and therefore will try to create the tag ``v1.2.0-alpha.1``, leading to an error as this tag was already created against ``feature-1``. To get around this issue, you can pass the branch name as part of the build metadata: .. 
code-block:: shell semantic-release version --build-metadata $(git branch --show-current) This would lead to the tag ``v1.2.0-alpha.1+feature-1`` and ``v1.2.0-alpha.1+feature-2`` being applied to branches ``feature-1`` and ``feature-2``, respectively. Note that "`build metadata MUST be ignored`_" per the semver specification when comparing two versions, so these two prereleases would be considered equivalent semantic versions, but when merged to the branch configured to produce full releases (``main``), if released separately the changes from each branch would be released in two versions that would be considered different according to the semver specification. .. note:: If you have tags in your Git repository that are not valid semantic versions (which have then been formatted into your :ref:`tag_format `), these tags will be ignored for the purposes of calculating the next version. .. _prereleases: https://semver.org/#spec-item-9 .. _build metadata MUST be ignored: https://semver.org/#spec-item-10 .. _multibranch-releases-configuring: Configuring Multibranch Releases -------------------------------- Within your configuration file, you can create one or more groups of branches (*"release groups"*) that produce a certain type of release. Options are configured at the group level, and the group to use is chosen based on the *current branch name* against which Python Semantic Release is running. Each release group is configured as a nested mapping under the ``tool.semantic_release.branches`` key in ``pyproject.toml``, or the equivalent structure in other formats. the mapping requires a single key that is used as a name for the release group, which can help to identify it in log messages but has no effect on the behavior of the release. For example, Python Semantic Release has only one release group by default with the name ``main``. 
Inside each release group, the following key-value pairs can be set: +----------------------+----------+-----------+--------------------------------------------------------+ | Key | Required | Default | Description | +----------------------+----------+-----------+--------------------------------------------------------+ | match | Yes | N/A | A `Python regular expression`_ to match against the | | | | | active branch's name. If the branch name matches the | | | | | provided regular expression, then this release group | | | | | is chosen to provide the other configuration settings | | | | | available. | +----------------------+----------+-----------+--------------------------------------------------------+ | prerelease | No | ``false`` | Whether or not branches in this release group should | | | | | a prerelease instead of a full release | +----------------------+----------+-----------+--------------------------------------------------------+ | prerelease_token | No | ``rc`` | If creating a prerelease, specify the string to be | | | | | used as a prerelease token in any new versions created | | | | | against this branch. | +----------------------+----------+-----------+--------------------------------------------------------+ .. _Python regular expression: https://docs.python.org/3/library/re.html .. warning:: If two release groups have overlapping "match" patterns, i.e. a the name of a branch could theoretically match both patterns, then the release group which is defined first in your configuration file is used. Because of this, it's recommended that you place release groups with more specific match patterns higher up in your configuration file than those with patterns that would match a broader range of branch names. For example, suppose a project currently on version ``1.22.4`` is working on a new major version. 
The project wants to create a branch called ``2.x.x`` against which they will develop the new major version, and they would like to create "release candidate" ("rc") prereleases from this branch. There are also a number of new features to integrate, and the project has agreed that all such branches should be named according to the convention ``next-{developer initials}-{issue number}``, leading to branches named similarly to ``next-bc-prj-123``. The project would like to release with tags that include some way to identify the branch and date on which the release was made from the tag. This project would be able to leverage the following configuration to achieve the above requirements from their release configuration: .. code-block:: toml [tool.semantic_release.branches.main] match = "(main|master)" prerelease = false [tool.semantic_release.branches."2.x.x"] match = "2.x.x" prerelease = true prerelease_token = "rc" [tool.semantic_release.branches."2.x.x New Features"] match = "next-\\w+-prj-\\d+" prerelease = true prerelease_token = "alpha" In a CI pipeline, the following command would allow attaching the date and branch name to the versions that are produced (note this example uses the UNIX ``date`` command): .. code-block:: bash semantic-release version \ --build-metadata "$(git branch --show-current).$(date +%Y%m%d)" This would lead to versions such as ``1.1.1+main.20221127`` or ``2.0.0-rc.4+2.x.x.20221201``. .. note:: Remember that it is always possible to override the release rules configured by using the :ref:`cmd-version-option-force-level` and :ref:`cmd-version-option-as-prerelease` flags. python-semantic-release-9.21.0/docs/psr_changelog.rst000066400000000000000000000000361475670435200226540ustar00rootroot00000000000000.. include:: ../CHANGELOG.rst python-semantic-release-9.21.0/docs/strict_mode.rst000066400000000000000000000045421475670435200223630ustar00rootroot00000000000000.. 
_strict-mode: Strict Mode =========== Strict Mode is enabled by use of the :ref:`strict ` parameter to the main command for Python Semantic Release. Strict Mode alters the behavior of Python Semantic Release when certain conditions are encountered that prevent Python Semantic Release from performing an action. Typically, this will result in a warning becoming an error, or a different exit code (0 vs non-zero) being produced when Python Semantic Release exits early. For example: .. code-block:: bash #!/usr/bin/bash set -euo pipefail git checkout $NOT_A_RELEASE_BRANCH pip install \ black \ isort \ twine \ pytest \ python-semantic-release isort . # sort imports black . # format the code pytest # test the code semantic-release --strict version # ERROR - not a release branch twine upload dist/* # publish the code Using Strict Mode with the ``--strict`` flag ensures this simple pipeline will fail while running ``semantic-release``, as the non-zero exit code will cause it to stop when combined with the ``-e`` option. Without Strict Mode, the ``semantic-release`` command will exit with code 0, causing the above pipeline to continue. The specific effects of enabling Strict Mode are detailed below. .. _strict-mode-not-a-release-branch: Non-Release Branches ~~~~~~~~~~~~~~~~~~~~ When running in Strict Mode, invoking Python Semantic Release on a non-Release branch will cause an error with a non-zero exit code. This means that you can prevent an automated script from running further against branches you do not want to release from, for example in multibranch CI pipelines. Running without Strict Mode will allow subsequent steps in the pipeline to also execute, but be aware that certain actions that Python Semantic Release may perform for you will likely not have been carried out, such as writing to files or creating a git commit in your repository. .. seealso:: - :ref:`multibranch-releases` .. 
_strict-mode-version-already-released: Version Already Released/No Release To Be Made ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When Strict Mode is not enabled and Python Semantic Release identifies that no release needs to be made, it will exit with code 0. You can cause Python Semantic Release to raise an error if no release needs to be made by enabling Strict Mode. python-semantic-release-9.21.0/docs/troubleshooting.rst000066400000000000000000000026531475670435200232770ustar00rootroot00000000000000.. _troubleshooting: Troubleshooting =============== - Check your configuration file for :ref:`configuration` - Check your Git tags match your :ref:`tag_format `; tags using other formats are ignored during calculation of the next version. .. _troubleshooting-verbosity: Increasing Verbosity -------------------- If you are having trouble with Python Semantic Release or would like to see additional information about the actions that it is taking, you can use the top-level :ref:`cmd-main-option-verbosity` option. This can be supplied multiple times to increase the logging verbosity of the :ref:`cmd-main` command or any of its subcommands during their execution. You can supply this as many times as you like, but supplying more than twice has no effect. Supply :ref:`cmd-main-option-verbosity` once for ``INFO`` output, and twice for ``DEBUG``. For example:: semantic-release -vv version --print .. note:: The :ref:`cmd-main-option-verbosity` option must be supplied to the top-level ``semantic-release`` command, before the name of any sub-command. .. warning:: The volume of logs when using ``DEBUG`` verbosity may be significantly increased, compared to ``INFO`` or the default ``WARNING``, and as a result executing commands with ``semantic-release`` may be significantly slower when using ``DEBUG``. .. note:: The provided GitHub action sets the verbosity level to INFO by default. 
python-semantic-release-9.21.0/pyproject.toml000066400000000000000000000243311475670435200212770ustar00rootroot00000000000000# Ref: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/ # and https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html [build-system] requires = ["setuptools ~= 75.3.0", "wheel ~= 0.42"] build-backend = "setuptools.build_meta" [project] name = "python-semantic-release" version = "9.21.0" description = "Automatic Semantic Versioning for Python projects" requires-python = ">=3.8" license = { text = "MIT" } classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", ] readme = "README.rst" authors = [{ name = "Rolf Erik Lekang", email = "me@rolflekang.com" }] dependencies = [ "click ~= 8.0", "click-option-group ~= 0.5", "gitpython ~= 3.0", "requests ~= 2.25", "jinja2 ~= 3.1", "python-gitlab ~= 4.0", "tomlkit ~= 0.11", "dotty-dict ~= 1.3", "importlib-resources ~= 6.0", "pydantic ~= 2.0", "rich ~= 13.0", "shellingham ~= 1.5", "Deprecated ~= 1.2", # Backport of deprecated decorator for python 3.8 ] [project.scripts] python-semantic-release = "semantic_release.__main__:main" semantic-release = "semantic_release.__main__:main" psr = "semantic_release.__main__:main" [project.urls] changelog = "https://github.com/python-semantic-release/python-semantic-release/blob/master/CHANGELOG.md" documentation = "https://python-semantic-release.readthedocs.io" homepage = "https://python-semantic-release.readthedocs.io" issues = "https://github.com/python-semantic-release/python-semantic-release/issues" repository = "http://github.com/python-semantic-release/python-semantic-release.git" [project.optional-dependencies] build = [ "build ~= 1.2" ] 
docs = [ "Sphinx ~= 6.0", "sphinxcontrib-apidoc == 0.5.0", "sphinx-autobuild == 2024.2.4", "furo ~= 2024.1", ] test = [ "coverage[toml] ~= 7.0", "filelock ~= 3.15", "flatdict ~= 4.0", "freezegun ~= 1.5", "pyyaml ~= 6.0", "pytest ~= 8.3", "pytest-clarity ~= 1.0", "pytest-cov ~= 5.0", "pytest-env ~= 1.0", "pytest-lazy-fixtures ~= 1.1.1", "pytest-mock ~= 3.0", "pytest-order ~= 1.3", "pytest-pretty ~= 1.2", "pytest-xdist ~= 3.0", "responses ~= 0.25.0", "requests-mock ~= 1.10", ] dev = [ "pre-commit ~= 3.5", "tox ~= 4.11", "ruff == 0.6.1" ] mypy = [ "mypy == 1.15.0", "types-Deprecated ~= 1.2", "types-requests ~= 2.32.0", "types-pyyaml ~= 6.0", ] [tool.setuptools] include-package-data = true [tool.setuptools.packages.find] where = ["src"] [tool.pytest.ini_options] env = [ "PYTHONHASHSEED = 123456" ] addopts = [ # TO DEBUG in single process, swap auto to 0 # "-nauto", # "-n0", "-ra", "--diff-symbols", "--durations=20", # No default coverage - causes problems with debuggers # "--cov=semantic_release", # "--cov-context=test", # "--cov-report=html:coverage-html", # "--cov-report=term-missing", ] testpaths = [ "tests" ] markers = [ "unit: mark a test as a unit test", "e2e: mark a test as a end-to-end test", "comprehensive: mark a test as a comprehensive (multiple variations) test", ] [tool.coverage.html] show_contexts = true [tool.coverage.run] omit = ["*/tests/*"] [tool.bandit] targets = ["semantic_release"] [tool.tox] legacy_tox_ini = """ [tox] envlist = mypy, py{38,39,310,311,312}, coverage ruff skipsdist = True [testenv] passenv = CI setenv = PYTHONPATH = {toxinidir} TESTING = True deps = .[test] commands = coverage run -p --source=semantic_release -m pytest {posargs:tests} [testenv:mypy] deps = .[mypy] commands = mypy . [testenv:coverage] deps = coverage[toml] commands = coverage combine coverage report -m coverage xml [testenv:ruff] deps = .[dev] commands = ruff check . 
--statistics --output-format=text """ [tool.mypy] python_version = "3.8" show_column_numbers = true show_error_context = true pretty = true error_summary = true follow_imports = "normal" enable_error_code = ["ignore-without-code"] disallow_untyped_calls = true # warn_return_any = true strict_optional = true warn_no_return = true warn_redundant_casts = true # warn_unused_ignores = true warn_unused_configs = true # warn_unreachable = true disallow_untyped_defs = true check_untyped_defs = true cache_dir = "/dev/null" plugins = ["pydantic.mypy"] [[tool.mypy.overrides]] module = "tests.*" disallow_untyped_defs = false [[tool.mypy.overrides]] module = "flatdict" ignore_missing_imports = true [[tool.mypy.overrides]] module = "shellingham" ignore_missing_imports = true [[tool.mypy.overrides]] module = "dotty_dict" ignore_missing_imports = true [tool.ruff] line-length = 88 target-version = "py38" force-exclude = true output-format = "grouped" show-fixes = true src = ["semantic_release", "tests"] [tool.ruff.lint] select = ["ALL"] # See https://docs.astral.sh/ruff/rules/ # for any of these codes you can also run `ruff rule [CODE]` # which explains it in the terminal ignore = [ # attribute shadows builtin (e.g. 
Foo.list()) "A003", # Annotations (flake8-annotations) # missing "self" type-hint "ANN101", "ANN102", "ANN401", # flake8-bugbear "B019", # flake8-commas "COM", # Missing docstrings - eventually want to enable "D100", "D101", "D102", "D103", "D104", "D105", "D107", "D203", "D205", "D212", "D400", "D401", "D404", "D415", # flake8-datetimez "DTZ", # flake8-errmsg "EM", # Some todos and some examples; leave this disabled for now "ERA001", # don't compare types, use isinstance() # sometimes using type(x) == y is deliberately chosen to exclude # subclasses "E721", # flake8-fixme "FIX", # flake8-boolean-trap "FBT", # No implicit packages "INP001", # Errors should end with "Error" "N818", # mypy prevents blanket-type-ignore "PGH003", # Fixtures that do not return a value need an underscore prefix. The rule # does not handle generators. "PT004", # flake8-pytest-style, values rowtype (list|tuple) "PT007", # pytest.raises needs a match - eventually want to enable "PT011", "PT012", "PT013", # pylint "PLR", "PLE1507", # flake8-use-pathlib "PTH", # flake8-raise "RSE", # ruff # This seems to flag a load of false-positives, thinking that the # noqa's are # unneeded and trying to fix them even though ruff then flags the errors that # were previously ignored "RUF100", # flake8-todos "TD002", "TD003", # tryceratops "TRY003", "TRY401", # other errors that conflict with ruff format # indentation-with-invalid-multiple "W191", "E111", "E114", "E117", "E501", "D206", "D300", "Q000", "Q001", "Q002", "Q003", "COM812", "COM812", "ISC001", "ISC002", ] external = ["V"] task-tags = ["NOTE", "TODO", "FIXME", "XXX"] [tool.ruff.format] docstring-code-format = true indent-style = "space" line-ending = "lf" quote-style = "double" [tool.ruff.lint.per-file-ignores] # Imported but unused "__init__.py" = ["F401"] # pydantic 1 can't handle __future__ annotations-enabled syntax on < 3.10 "src/semantic_release/cli/config.py" = ["UP", "TCH"] "src/semantic_release/commit_parser/*" = ["UP", "FA", "TCH"] # 
Method argument not used. This is mostly a base class # anyway "src/semantic_release/hvcs/_base.py" = ["ARG002"] # from tests.fixtures import * is deliberate "tests/conftest.py" = ["F403"] "tests/fixtures/**/__init__.py" = ["F403"] "tests/*" = [ # unused arguments - likely fixtures to be moved to # @pytest.mark.usefixtures "ARG001", # "assert" used "S101", # possible hard-coded password assigned to argument # because we use "prerelease_token=..." and bandit panics # when it sees *_token variables "S105", "S106", "S107", # pseudo-random generators not suitable for cryptographic purposes # (e.g. random.choice used) "S311", # Private member access "SLF001", # Annotations "ANN", # Using format instead of f-string for readablity "UP032", ] [tool.ruff.lint.mccabe] max-complexity = 10 [tool.ruff.lint.flake8-implicit-str-concat] allow-multiline = true [tool.ruff.lint.flake8-quotes] inline-quotes = "double" multiline-quotes = "double" [tool.ruff.lint.flake8-tidy-imports] ban-relative-imports = "all" [tool.ruff.lint.flake8-type-checking] strict = true [tool.ruff.lint.flake8-pytest-style] fixture-parentheses = false mark-parentheses = false parametrize-names-type = "csv" [tool.ruff.lint.isort] # required-imports = ["from __future__ import annotations"] combine-as-imports = true known-first-party = ["semantic_release"] forced-separate = ["tests"] relative-imports-order = "closest-to-furthest" section-order = [ "future", "standard-library", "third-party", "first-party", "tests", "local-folder", ] sections = { "tests" = ["tests"] } [tool.vulture] ignore_names = ["change_to_ex_proj_dir", "init_example_project"] [tool.semantic_release] logging_use_named_masks = true commit_parser = "conventional" commit_parser_options = { parse_squash_commits = true, ignore_merge_commits = true } build_command = """ python -m pip install -e .[build] python -m build . 
""" major_on_zero = true version_variables = ["src/semantic_release/__init__.py:__version__"] version_toml = ["pyproject.toml:project.version"] [tool.semantic_release.changelog] # default_templates = { changelog_file = "CHANGELOG.rst" } exclude_commit_patterns = [ '''chore(?:\([^)]*?\))?: .+''', '''ci(?:\([^)]*?\))?: .+''', '''refactor(?:\([^)]*?\))?: .+''', '''style(?:\([^)]*?\))?: .+''', '''test(?:\([^)]*?\))?: .+''', '''build\((?!deps\): .+)''', '''Merged? .*''', '''Initial Commit.*''', # Old semantic-release version commits '''^\d+\.\d+\.\d+''', ] insertion_flag = "=========\nCHANGELOG\n=========" mode = "update" template_dir = "config/release-templates" [tool.semantic_release.branches.main] match = "(main|master)" prerelease = false prerelease_token = "rc" [tool.semantic_release.branches.alpha] match = "^(feat|fix|perf)/.+" prerelease = true prerelease_token = "alpha" [tool.semantic_release.branches.dev] match = ".+" prerelease = true prerelease_token = "dev" [tool.semantic_release.remote] type = "github" token = { env = "GH_TOKEN" } [tool.semantic_release.publish] upload_to_vcs_release = true python-semantic-release-9.21.0/scripts/000077500000000000000000000000001475670435200200475ustar00rootroot00000000000000python-semantic-release-9.21.0/scripts/__init__.py000066400000000000000000000000001475670435200221460ustar00rootroot00000000000000python-semantic-release-9.21.0/scripts/bump_version_in_docs.py000066400000000000000000000042611475670435200246320ustar00rootroot00000000000000# ruff: noqa: T201, allow print statements in non-prod scripts from __future__ import annotations from os import getenv from pathlib import Path from re import compile as regexp # Constants PROJ_DIR = Path(__file__).resolve().parent.parent DOCS_DIR = PROJ_DIR / "docs" version_replace_pattern = regexp(r"\$(NEW_VERSION|{NEW_VERSION})") tag_replace_pattern = regexp(r"\$(NEW_RELEASE_TAG|{NEW_RELEASE_TAG})") def update_github_actions_example(filepath: Path, release_tag: str) -> None: 
psr_regex = regexp(r"(uses: python-semantic-release/python-semantic-release)@\S+$") psr_publish_action_regex = regexp( r"(uses: python-semantic-release/publish-action)@\S+$" ) file_content_lines: list[str] = filepath.read_text().splitlines() for regex in [psr_regex, psr_publish_action_regex]: file_content_lines = list( map( lambda line, regex=regex: regex.sub(r"\1@" + release_tag, line), # type: ignore[misc] file_content_lines, ) ) print(f"Bumping version in {filepath} to", release_tag) filepath.write_text(str.join("\n", file_content_lines) + "\n") def envsubst(filepath: Path, version: str, release_tag: str) -> None: file_content = filepath.read_text() found = False for pattern, replacement in [ (version_replace_pattern, version), (tag_replace_pattern, release_tag), ]: if not found and (found := bool(pattern.search(file_content))): print(f"Applying envsubst to {filepath}") file_content = pattern.sub(replacement, file_content) filepath.write_text(file_content) if __name__ == "__main__": new_release_tag = getenv("NEW_RELEASE_TAG") new_version = getenv("NEW_VERSION") if not new_release_tag: print("NEW_RELEASE_TAG environment variable is not set") exit(1) if not new_version: print("NEW_VERSION environment variable is not set") exit(1) update_github_actions_example( DOCS_DIR / "automatic-releases" / "github-actions.rst", new_release_tag ) for doc_file in DOCS_DIR.rglob("*.rst"): envsubst(filepath=doc_file, version=new_version, release_tag=new_release_tag) python-semantic-release-9.21.0/src/000077500000000000000000000000001475670435200171475ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/000077500000000000000000000000001475670435200224525ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/__init__.py000066400000000000000000000023151475670435200245640ustar00rootroot00000000000000"""Python Semantic Release""" from __future__ import annotations from semantic_release.commit_parser import ( CommitParser, 
ParsedCommit, ParseError, ParseResult, ParseResultType, ParserOptions, ) from semantic_release.enums import LevelBump from semantic_release.errors import ( CommitParseError, InvalidConfiguration, InvalidVersion, SemanticReleaseBaseError, ) from semantic_release.version import ( Version, VersionTranslator, next_version, tags_and_versions, ) __version__ = "9.21.0" __all__ = [ "CommitParser", "ParsedCommit", "ParseError", "ParseResult", "ParseResultType", "ParserOptions", "LevelBump", "SemanticReleaseBaseError", "CommitParseError", "InvalidConfiguration", "InvalidVersion", "Version", "VersionTranslator", "next_version", "tags_and_versions", ] def setup_hook(argv: list[str]) -> None: """ A hook to be used in setup.py to enable `python setup.py publish`. :param argv: sys.argv """ if len(argv) > 1 and any( cmd in argv for cmd in ["version", "publish", "changelog"] ): from semantic_release.cli.commands.main import main main() python-semantic-release-9.21.0/src/semantic_release/__main__.py000066400000000000000000000032501475670435200245440ustar00rootroot00000000000000"""Entrypoint for the `semantic-release` module.""" # ruff: noqa: T201, print statements are fine here as this is for cli entry only from __future__ import annotations import sys from traceback import format_exception from semantic_release import globals from semantic_release.cli.commands.main import main as cli_main from semantic_release.enums import SemanticReleaseLogLevels def main() -> None: try: cli_main(args=sys.argv[1:]) print("semantic-release completed successfully.", file=sys.stderr) except KeyboardInterrupt: print("\n-- User Abort! 
--", file=sys.stderr) sys.exit(127) except Exception as err: # noqa: BLE001, graceful error handling across application if globals.log_level <= SemanticReleaseLogLevels.DEBUG: print(f"{err.__class__.__name__}: {err}\n", file=sys.stderr) etype, value, traceback = sys.exc_info() print( str.join( "", format_exception( etype, value, traceback, limit=None, chain=True, )[:-1], ), file=sys.stderr, ) print( str.join("\n", [f"::ERROR:: {line}" for line in str(err).splitlines()]), file=sys.stderr, ) if globals.log_level > SemanticReleaseLogLevels.DEBUG: print( "Run semantic-release in very verbose mode (-vv) to see the full traceback.", file=sys.stderr, ) sys.exit(1) if __name__ == "__main__": main() python-semantic-release-9.21.0/src/semantic_release/changelog/000077500000000000000000000000001475670435200244015ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/changelog/__init__.py000066400000000000000000000004061475670435200265120ustar00rootroot00000000000000from semantic_release.changelog.context import ( ChangelogContext, make_changelog_context, ) from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.changelog.template import ( environment, recursive_render, ) python-semantic-release-9.21.0/src/semantic_release/changelog/context.py000066400000000000000000000135441475670435200264460ustar00rootroot00000000000000from __future__ import annotations import logging import os from dataclasses import dataclass from enum import Enum from pathlib import Path, PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING, Any, Callable, Literal from urllib3.util import Url from semantic_release.const import PYPI_WEB_DOMAIN from semantic_release.helpers import sort_numerically if TYPE_CHECKING: # pragma: no cover from jinja2 import Environment from semantic_release.changelog.release_history import Release, ReleaseHistory from semantic_release.hvcs._base import HvcsBase from 
semantic_release.version.version import Version @dataclass class ReleaseNotesContext: repo_name: str repo_owner: str hvcs_type: str version: Version release: Release mask_initial_release: bool license_name: str filters: tuple[Callable[..., Any], ...] = () def bind_to_environment(self, env: Environment) -> Environment: env_globals = dict( filter(lambda k_v: k_v[0] != "filters", self.__dict__.items()) ) for g, v in env_globals.items(): env.globals[g] = v for f in self.filters: env.filters[f.__name__] = f return env class ChangelogMode(Enum): INIT = "init" UPDATE = "update" @dataclass class ChangelogContext: repo_name: str repo_owner: str hvcs_type: str history: ReleaseHistory changelog_mode: Literal["update", "init"] prev_changelog_file: str changelog_insertion_flag: str mask_initial_release: bool filters: tuple[Callable[..., Any], ...] = () def bind_to_environment(self, env: Environment) -> Environment: env.globals["context"] = self env.globals["ctx"] = self for f in self.filters: env.filters[f.__name__] = f return env def make_changelog_context( hvcs_client: HvcsBase, release_history: ReleaseHistory, mode: ChangelogMode, prev_changelog_file: Path, insertion_flag: str, mask_initial_release: bool, ) -> ChangelogContext: return ChangelogContext( repo_name=hvcs_client.repo_name, repo_owner=hvcs_client.owner, history=release_history, changelog_mode=mode.value, changelog_insertion_flag=insertion_flag, mask_initial_release=mask_initial_release, prev_changelog_file=str(prev_changelog_file), hvcs_type=hvcs_client.__class__.__name__.lower(), filters=( *hvcs_client.get_changelog_context_filters(), create_pypi_url, read_file, convert_md_to_rst, autofit_text_width, sort_numerically, ), ) def create_pypi_url(package_name: str, version: str = "") -> str: project_name = package_name.strip("/").strip() if not project_name: raise ValueError("package_name must not be empty!") return Url( scheme="https", host=PYPI_WEB_DOMAIN, path=str(PurePosixPath("project", project_name, 
version.strip("/").strip())), ).url.rstrip("/") def read_file(filepath: str) -> str: try: if not filepath: raise FileNotFoundError("No file path provided") # noqa: TRY301 with Path(filepath).open(newline=os.linesep) as rfd: return rfd.read() except FileNotFoundError as err: logging.warning(err) return "" def convert_md_to_rst(md_content: str) -> str: rst_content = md_content replacements = { # Replace markdown doubleunder bold with rst bold "bold-inline": (regexp(r"(?<=\s)__(.+?)__(?=\s|$)"), r"**\1**"), # Replace markdown italics with rst italics "italic-inline": (regexp(r"(?<=\s)_([^_].+?[^_])_(?=\s|$)"), r"*\1*"), # Replace markdown bullets with rst bullets "bullets": (regexp(r"^(\s*)-(\s)"), r"\1*\2"), # Replace markdown inline raw content with rst inline raw content "raw-inline": (regexp(r"(?<=\s)(`[^`]+`)(?![`_])"), r"`\1`"), # Replace markdown inline link with rst inline link "link-inline": ( regexp(r"(?<=\s)\[([^\]]+)\]\(([^)]+)\)(?=\s|$)"), r"`\1 <\2>`_", ), } for pattern, replacement in replacements.values(): rst_content = pattern.sub(replacement, rst_content) return rst_content def autofit_text_width(text: str, maxwidth: int = 100, indent_size: int = 0) -> str: """Format the description text to fit within a specified width""" input_text = text.strip() if len(input_text) <= maxwidth: # If the text is already within the maxwidth, return immediately return input_text indent = " " * indent_size formatted_description = [] # Re-format text to fit within the maxwidth for paragraph in input_text.split("\n\n"): formatted_paragraph = [] # Split the paragraph into words with no empty strings words = list( filter( None, paragraph.replace("\r", "").replace("\n", " ").strip().split(" ") ) ) # Initialize the line for each paragraph line = words[0] next_line = "" for word in words[1:]: # Check if the current line + the next word (and a space) will fit within the maxwidth # If it does, then update the current line next_line = f"{line} {word}" if len(next_line) <= 
maxwidth: line = next_line continue # Add the current line to the paragraph and start a new line formatted_paragraph.append(line) line = f"{indent}{word}" # Store the last line in the paragraph since it hasn't reached the maxwidth yet formatted_paragraph.append(line) # formatted_description.append(str.join("\n", formatted_paragraph)) # Print the formatted description return str.join("\n\n", formatted_description).strip() python-semantic-release-9.21.0/src/semantic_release/changelog/release_history.py000066400000000000000000000245221475670435200301610ustar00rootroot00000000000000from __future__ import annotations import logging from collections import defaultdict from datetime import datetime, timedelta, timezone from typing import TYPE_CHECKING, TypedDict from git.objects.tag import TagObject from semantic_release.commit_parser import ParseError from semantic_release.commit_parser.token import ParsedCommit from semantic_release.commit_parser.util import force_str from semantic_release.enums import LevelBump from semantic_release.helpers import validate_types_in_sequence from semantic_release.version.algorithm import tags_and_versions if TYPE_CHECKING: # pragma: no cover from re import Pattern from typing import Iterable, Iterator from git.repo.base import Repo from git.util import Actor from semantic_release.commit_parser import ( CommitParser, ParseResult, ParserOptions, ) from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version log = logging.getLogger(__name__) class ReleaseHistory: @classmethod def from_git_history( cls, repo: Repo, translator: VersionTranslator, commit_parser: CommitParser[ParseResult, ParserOptions], exclude_commit_patterns: Iterable[Pattern[str]] = (), ) -> ReleaseHistory: all_git_tags_and_versions = tags_and_versions(repo.tags, translator) unreleased: dict[str, list[ParseResult]] = defaultdict(list) released: dict[Version, Release] = {} # Performance optimization: create a mapping 
of tag sha to version # so we can quickly look up the version for a given commit based on sha tag_sha_2_version_lookup = { tag.commit.hexsha: (tag, version) for tag, version in all_git_tags_and_versions } ignore_merge_commits = bool( hasattr(commit_parser, "options") and hasattr(commit_parser.options, "ignore_merge_commits") and getattr(commit_parser.options, "ignore_merge_commits") # noqa: B009 ) # Strategy: # Loop through commits in history, parsing as we go. # Add these commits to `unreleased` as a key-value mapping # of type_ to ParseResult, until we encounter a tag # which matches a commit. # Then, we add the version for that tag as a key to `released`, # and set the value to an empty dict. Into that empty dict # we place the key-value mapping type_ to ParseResult as before. # We do this until we encounter a commit which another tag matches. the_version: Version | None = None for commit in repo.iter_commits("HEAD", topo_order=True): # Determine if we have found another release log.debug("checking if commit %s matches any tags", commit.hexsha[:7]) t_v = tag_sha_2_version_lookup.get(commit.hexsha, None) if t_v is None: log.debug("no tags correspond to commit %s", commit.hexsha) else: # Unpack the tuple (overriding the current version) tag, the_version = t_v # we have found the latest commit introduced by this tag # so we create a new Release entry log.debug("found commit %s for tag %s", commit.hexsha, tag.name) # tag.object is a Commit if the tag is lightweight, otherwise # it is a TagObject with additional metadata about the tag if isinstance(tag.object, TagObject): tagger = tag.object.tagger committer = tag.object.tagger.committer() _tz = timezone(timedelta(seconds=-1 * tag.object.tagger_tz_offset)) tagged_date = datetime.fromtimestamp(tag.object.tagged_date, tz=_tz) else: # For some reason, sometimes tag.object is a Commit tagger = tag.object.author committer = tag.object.author _tz = timezone(timedelta(seconds=-1 * tag.object.author_tz_offset)) tagged_date = 
datetime.fromtimestamp( tag.object.committed_date, tz=_tz ) release = Release( tagger=tagger, committer=committer, tagged_date=tagged_date, elements=defaultdict(list), version=the_version, ) released.setdefault(the_version, release) log.info( "parsing commit [%s] %s", commit.hexsha[:8], str(commit.message).replace("\n", " ")[:54], ) # returns a ParseResult or list of ParseResult objects, # it is usually one, but we split a commit if a squashed merge is detected parse_results = commit_parser.parse(commit) if not any( ( isinstance(parse_results, (ParseError, ParsedCommit)), ( ( isinstance(parse_results, list) or type(parse_results) == tuple ) and validate_types_in_sequence( parse_results, (ParseError, ParsedCommit) ) ), ) ): raise TypeError("Unexpected type returned from commit_parser.parse") results: list[ParseResult] = [ *( [parse_results] if isinstance(parse_results, (ParseError, ParsedCommit)) else parse_results ), ] is_squash_commit = bool(len(results) > 1) # iterate through parsed commits to add to changelog definition for parsed_result in results: commit_message = force_str(parsed_result.commit.message) commit_type = ( "unknown" if isinstance(parsed_result, ParseError) else parsed_result.type ) log.debug("commit has type '%s'", commit_type) has_exclusion_match = any( pattern.match(commit_message) for pattern in exclude_commit_patterns ) commit_level_bump = ( LevelBump.NO_RELEASE if isinstance(parsed_result, ParseError) else parsed_result.bump ) if ignore_merge_commits and parsed_result.is_merge_commit(): log.info("Excluding merge commit[%s]", parsed_result.short_hash) continue # Skip excluded commits except for any commit causing a version bump # Reasoning: if a commit causes a version bump, and no other commits # are included, then the changelog will be empty. Even if ther was other # commits included, the true reason for a version bump would be missing. 
if has_exclusion_match and commit_level_bump == LevelBump.NO_RELEASE: log.info( "Excluding %s commit[%s] %s", "piece of squashed" if is_squash_commit else "", parsed_result.short_hash, commit_message.split("\n", maxsplit=1)[0][:20], ) continue if ( isinstance(parsed_result, ParsedCommit) and not parsed_result.include_in_changelog ): log.info( str.join( " ", [ "Excluding commit[%s] because parser determined", "it should not included in the changelog", ], ), parsed_result.short_hash, ) continue if the_version is None: log.info( "[Unreleased] adding commit[%s] to unreleased '%s'", parsed_result.short_hash, commit_type, ) unreleased[commit_type].append(parsed_result) continue log.info( "[%s] adding commit[%s] to release '%s'", the_version, parsed_result.short_hash, commit_type, ) released[the_version]["elements"][commit_type].append(parsed_result) return cls(unreleased=unreleased, released=released) def __init__( self, unreleased: dict[str, list[ParseResult]], released: dict[Version, Release] ) -> None: self.released = released self.unreleased = unreleased def __iter__( self, ) -> Iterator[dict[str, list[ParseResult]] | dict[Version, Release]]: """ Enables unpacking: >>> rh = ReleaseHistory(...) 
>>> unreleased, released = rh """ yield self.unreleased yield self.released def release( self, version: Version, tagger: Actor, committer: Actor, tagged_date: datetime ) -> ReleaseHistory: if version in self.released: raise ValueError(f"{version} has already been released!") # return a new instance to avoid potential accidental # mutation return ReleaseHistory( unreleased={}, released={ version: { "tagger": tagger, "committer": committer, "tagged_date": tagged_date, "elements": self.unreleased, "version": version, }, **self.released, }, ) def __repr__(self) -> str: return ( f"<{type(self).__qualname__}: " f"{sum(len(commits) for commits in self.unreleased.values())} " f"commits unreleased, {len(self.released)} versions released>" ) class Release(TypedDict): tagger: Actor committer: Actor tagged_date: datetime elements: dict[str, list[ParseResult]] version: Version python-semantic-release-9.21.0/src/semantic_release/changelog/template.py000066400000000000000000000130731475670435200265720ustar00rootroot00000000000000from __future__ import annotations import logging import os import shutil from pathlib import Path, PurePosixPath from typing import TYPE_CHECKING from jinja2 import FileSystemLoader from jinja2.sandbox import SandboxedEnvironment from semantic_release.helpers import dynamic_import if TYPE_CHECKING: # pragma: no cover from typing import Callable, Iterable, Literal from jinja2 import Environment log = logging.getLogger(__name__) # pylint: disable=too-many-arguments,too-many-locals def environment( template_dir: Path | str = ".", block_start_string: str = "{%", block_end_string: str = "%}", variable_start_string: str = "{{", variable_end_string: str = "}}", comment_start_string: str = "{#", comment_end_string: str = "#}", line_statement_prefix: str | None = None, line_comment_prefix: str | None = None, trim_blocks: bool = False, lstrip_blocks: bool = False, newline_sequence: Literal["\n", "\r", "\r\n"] = "\n", keep_trailing_newline: bool = False, 
extensions: Iterable[str] = (), autoescape: bool | str = True, ) -> SandboxedEnvironment: """ Create a jinja2.sandbox.SandboxedEnvironment with certain parameter resrictions. For example the Loader is fixed to FileSystemLoader, although the searchpath is configurable. ``autoescape`` can be a string in which case it should follow the convention ``module:attr``, in this instance it will be dynamically imported. See https://jinja.palletsprojects.com/en/3.1.x/api/#jinja2.Environment for full parameter descriptions """ autoescape_value: bool | Callable[[str | None], bool] if isinstance(autoescape, str): autoescape_value = dynamic_import(autoescape) else: autoescape_value = autoescape return ComplexDirectorySandboxedEnvironment( block_start_string=block_start_string, block_end_string=block_end_string, variable_start_string=variable_start_string, variable_end_string=variable_end_string, comment_start_string=comment_start_string, comment_end_string=comment_end_string, line_statement_prefix=line_statement_prefix, line_comment_prefix=line_comment_prefix, trim_blocks=trim_blocks, lstrip_blocks=lstrip_blocks, newline_sequence=newline_sequence, keep_trailing_newline=keep_trailing_newline, extensions=extensions, autoescape=autoescape_value, loader=FileSystemLoader(template_dir, encoding="utf-8"), ) class ComplexDirectorySandboxedEnvironment(SandboxedEnvironment): def join_path(self, template: str, parent: str) -> str: """ Add support for complex directory structures in the template directory. This method overrides the default functionality of the SandboxedEnvironment where all 'include' keywords expect to be in the same directory as the calling template, however this is unintuitive when using a complex directory structure. This override simulates the changing of directories when you include the template from a child directory. When the child then includes a template, it will make the path relative to the child directory rather than the top level template directory. 
""" # Must be posixpath because jinja only knows how to handle posix path includes return str(PurePosixPath(parent).parent / template) def recursive_render( template_dir: Path, environment: Environment, _root_dir: str | os.PathLike[str] = ".", ) -> list[str]: rendered_paths: list[str] = [] for root, file in ( (Path(root), file) for root, _, files in os.walk(template_dir) for file in files if not any( elem.startswith(".") for elem in Path(root).relative_to(template_dir).parts ) and not file.startswith(".") ): output_path = (_root_dir / root.relative_to(template_dir)).resolve() log.info("Rendering templates from %s to %s", root, output_path) output_path.mkdir(parents=True, exist_ok=True) if file.endswith(".j2"): # We know the file ends with .j2 by the filter in the for-loop output_filename = file[:-3] # Strip off the template directory from the front of the root path - # that's the output location relative to the repo root src_file_path = str((root / file).relative_to(template_dir)) output_file_path = str((output_path / output_filename).resolve()) # Although, file stream rendering is possible and preferred in most # situations, here it is not desired as you cannot read the previous # contents of a file during the rendering of the template. This mechanism # is used for inserting into a current changelog. 
When using stream rendering # of the same file, it always came back empty log.debug("rendering %s to %s", src_file_path, output_file_path) rendered_file = environment.get_template(src_file_path).render().rstrip() with open(output_file_path, "w", encoding="utf-8") as output_file: output_file.write(f"{rendered_file}\n") rendered_paths.append(output_file_path) else: src_file = str((root / file).resolve()) target_file = str((output_path / file).resolve()) log.debug( "source file %s is not a template, copying to %s", src_file, target_file ) shutil.copyfile(src_file, target_file) rendered_paths.append(target_file) return rendered_paths python-semantic-release-9.21.0/src/semantic_release/cli/000077500000000000000000000000001475670435200232215ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/cli/__init__.py000066400000000000000000000000001475670435200253200ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/cli/changelog_writer.py000066400000000000000000000221011475670435200271120ustar00rootroot00000000000000from __future__ import annotations import os from contextlib import suppress from logging import getLogger from pathlib import Path from typing import TYPE_CHECKING # NOTE: use backport with newer API than stdlib from importlib_resources import files import semantic_release from semantic_release.changelog.context import ( ReleaseNotesContext, autofit_text_width, create_pypi_url, make_changelog_context, ) from semantic_release.changelog.template import environment, recursive_render from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.cli.const import ( DEFAULT_CHANGELOG_NAME_STEM, DEFAULT_RELEASE_NOTES_TPL_FILE, JINJA2_EXTENSION, ) from semantic_release.cli.util import noop_report from semantic_release.errors import InternalError from semantic_release.helpers import sort_numerically if TYPE_CHECKING: # pragma: no cover from jinja2 import Environment from 
semantic_release.changelog.context import ChangelogContext from semantic_release.changelog.release_history import Release, ReleaseHistory from semantic_release.cli.config import RuntimeContext from semantic_release.hvcs._base import HvcsBase log = getLogger(__name__) def get_default_tpl_dir(style: str, sub_dir: str | None = None) -> Path: module_base_path = Path(str(files(semantic_release.__name__))) default_templates_path = module_base_path.joinpath( f"data/templates/{style}", "" if sub_dir is None else sub_dir.strip("/"), ) if default_templates_path.is_dir(): return default_templates_path raise InternalError( str.join( " ", [ "Default template directory not found at", f"{default_templates_path}. Installation corrupted!", ], ) ) def render_default_changelog_file( output_format: ChangelogOutputFormat, changelog_context: ChangelogContext, changelog_style: str, ) -> str: tpl_dir = get_default_tpl_dir(style=changelog_style, sub_dir=output_format.value) changelog_tpl_file = Path(DEFAULT_CHANGELOG_NAME_STEM).with_suffix( str.join(".", ["", output_format.value, JINJA2_EXTENSION.lstrip(".")]) ) # Create a new environment as we don't want user's configuration as it might # not match our default template structure template_env = changelog_context.bind_to_environment( environment( autoescape=False, newline_sequence="\n", template_dir=tpl_dir, ) ) # Using the proper enviroment with the changelog context, render the template template = template_env.get_template(str(changelog_tpl_file)) changelog_content = template.render().rstrip() # Normalize line endings to ensure universal newlines because that is what is expected # of the content when we write it to a file. When using pathlib.Path.write_text(), it # will automatically normalize the file to the OS. 
At this point after render, we may # have mixed line endings because of the read_file() call of the previous changelog # (which may be /r/n or /n) return str.join( "\n", [line.replace("\r", "") for line in changelog_content.split("\n")] ) def render_release_notes( release_notes_template_file: str, template_env: Environment, ) -> str: # NOTE: release_notes_template_file must be a relative path to the template directory # because jinja2's filtering and template loading filter is janky template = template_env.get_template(release_notes_template_file) release_notes = template.render().rstrip() + os.linesep # Normalize line endings to match the current platform return str.join( os.linesep, [line.replace("\r", "") for line in release_notes.split("\n")] ) def apply_user_changelog_template_directory( template_dir: Path, environment: Environment, destination_dir: Path, noop: bool = False, ) -> list[str]: if noop: noop_report( str.join( " ", [ "would have recursively rendered the template directory", f"{template_dir!r} relative to {destination_dir!r}.", "Paths which would be modified by this operation cannot be", "determined in no-op mode.", ], ) ) return [] return recursive_render( template_dir, environment=environment, _root_dir=destination_dir ) def write_default_changelog( changelog_file: Path, destination_dir: Path, output_format: ChangelogOutputFormat, changelog_context: ChangelogContext, changelog_style: str, noop: bool = False, ) -> str: if noop: noop_report( str.join( " ", [ "would have written your changelog to", str(changelog_file.relative_to(destination_dir)), ], ) ) return str(changelog_file) changelog_text = render_default_changelog_file( output_format=output_format, changelog_context=changelog_context, changelog_style=changelog_style, ) # write_text() will automatically normalize newlines to the OS, so we just use an universal newline here changelog_file.write_text(f"{changelog_text}\n", encoding="utf-8") return str(changelog_file) def write_changelog_files( 
runtime_ctx: RuntimeContext, release_history: ReleaseHistory, hvcs_client: HvcsBase, noop: bool = False, ) -> list[str]: project_dir = Path(runtime_ctx.repo_dir) template_dir = runtime_ctx.template_dir changelog_context = make_changelog_context( hvcs_client=hvcs_client, release_history=release_history, mode=runtime_ctx.changelog_mode, insertion_flag=runtime_ctx.changelog_insertion_flag, prev_changelog_file=runtime_ctx.changelog_file, mask_initial_release=runtime_ctx.changelog_mask_initial_release, ) user_templates = [] # Update known templates list if Directory exists and directory has actual files to render if template_dir.is_dir(): user_templates.extend( [ f for f in template_dir.rglob("*") if f.is_file() and f.suffix == JINJA2_EXTENSION ] ) with suppress(ValueError): # do not include a release notes override when considering number of changelog templates user_templates.remove(template_dir / DEFAULT_RELEASE_NOTES_TPL_FILE) # Render user templates if found if len(user_templates) > 0: return apply_user_changelog_template_directory( template_dir=template_dir, environment=changelog_context.bind_to_environment( runtime_ctx.template_environment ), destination_dir=project_dir, noop=noop, ) log.info("No contents found in %r, using default changelog template", template_dir) return [ write_default_changelog( changelog_file=runtime_ctx.changelog_file, destination_dir=project_dir, output_format=runtime_ctx.changelog_output_format, changelog_context=changelog_context, changelog_style=runtime_ctx.changelog_style, noop=noop, ) ] def generate_release_notes( hvcs_client: HvcsBase, release: Release, template_dir: Path, history: ReleaseHistory, style: str, mask_initial_release: bool, license_name: str = "", ) -> str: users_tpl_file = template_dir / DEFAULT_RELEASE_NOTES_TPL_FILE # Determine if the user has a custom release notes template or we should use # the default template directory with our default release notes template tpl_dir = ( template_dir if users_tpl_file.is_file() 
else get_default_tpl_dir( style=style, sub_dir=ChangelogOutputFormat.MARKDOWN.value ) ) release_notes_tpl_file = ( users_tpl_file.name if users_tpl_file.is_file() else DEFAULT_RELEASE_NOTES_TPL_FILE ) release_notes_env = ReleaseNotesContext( repo_name=hvcs_client.repo_name, repo_owner=hvcs_client.owner, hvcs_type=hvcs_client.__class__.__name__.lower(), version=release["version"], release=release, mask_initial_release=mask_initial_release, license_name=license_name, filters=( *hvcs_client.get_changelog_context_filters(), create_pypi_url, autofit_text_width, sort_numerically, ), ).bind_to_environment( # Use a new, non-configurable environment for release notes - # not user-configurable at the moment environment(autoescape=False, template_dir=tpl_dir) ) # TODO: Remove in v10 release_notes_env.globals["context"] = release_notes_env.globals["ctx"] = { "history": history, "mask_initial_release": mask_initial_release, } return render_release_notes( release_notes_template_file=release_notes_tpl_file, template_env=release_notes_env, ) python-semantic-release-9.21.0/src/semantic_release/cli/cli_context.py000066400000000000000000000100461475670435200261070ustar00rootroot00000000000000from __future__ import annotations import logging from pathlib import Path from typing import TYPE_CHECKING import click from click.core import ParameterSource from git import InvalidGitRepositoryError from pydantic import ValidationError from semantic_release.cli.config import ( RawConfig, RuntimeContext, ) from semantic_release.cli.util import load_raw_config_file, rprint from semantic_release.errors import ( DetachedHeadGitError, InvalidConfiguration, NotAReleaseBranch, ) if TYPE_CHECKING: # pragma: no cover from semantic_release.cli.config import GlobalCommandLineOptions class CliContext(click.Context): obj: CliContextObj class CliContextObj: def __init__( self, ctx: click.Context, logger: logging.Logger, global_opts: GlobalCommandLineOptions, ) -> None: self.ctx = ctx self.logger = logger 
self.global_opts = global_opts self._raw_config: RawConfig | None = None self._runtime_ctx: RuntimeContext | None = None @property def raw_config(self) -> RawConfig: if self._raw_config is None: self._raw_config = self._init_raw_config() return self._raw_config @property def runtime_ctx(self) -> RuntimeContext: """ Lazy load the runtime context. This is done to avoid configuration loading when the command is not run. This is useful for commands like `--help` and `--version` """ if self._runtime_ctx is None: self._runtime_ctx = self._init_runtime_ctx() return self._runtime_ctx def _init_raw_config(self) -> RawConfig: config_path = Path(self.global_opts.config_file) conf_file_exists = config_path.exists() was_conf_file_user_provided = bool( self.ctx.get_parameter_source("config_file") not in ( ParameterSource.DEFAULT, ParameterSource.DEFAULT_MAP, ) ) # TODO: Evaluate Exeception catches try: if was_conf_file_user_provided and not conf_file_exists: raise FileNotFoundError( # noqa: TRY301 f"File {self.global_opts.config_file} does not exist" ) config_obj = ( {} if not conf_file_exists else load_raw_config_file(config_path) ) if not config_obj: self.logger.info( "configuration empty, falling back to default configuration" ) return RawConfig.model_validate(config_obj) except FileNotFoundError as exc: click.echo(str(exc), err=True) self.ctx.exit(2) except ( ValidationError, InvalidConfiguration, InvalidGitRepositoryError, ) as exc: click.echo(str(exc), err=True) self.ctx.exit(1) def _init_runtime_ctx(self) -> RuntimeContext: # TODO: Evaluate Exception catches try: runtime = RuntimeContext.from_raw_config( self.raw_config, global_cli_options=self.global_opts, ) except NotAReleaseBranch as exc: rprint(f"[bold {'red' if self.global_opts.strict else 'orange1'}]{exc!s}") # If not strict, exit 0 so other processes can continue. 
For example, in # multibranch CI it might be desirable to run a non-release branch's pipeline # without specifying conditional execution of PSR based on branch name self.ctx.exit(2 if self.global_opts.strict else 0) except ( DetachedHeadGitError, InvalidConfiguration, InvalidGitRepositoryError, ValidationError, ) as exc: click.echo(str(exc), err=True) self.ctx.exit(1) # This allows us to mask secrets in the logging # by applying it to all the configured handlers for handler in logging.getLogger().handlers: handler.addFilter(runtime.masker) return runtime python-semantic-release-9.21.0/src/semantic_release/cli/commands/000077500000000000000000000000001475670435200250225ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/cli/commands/__init__.py000066400000000000000000000000001475670435200271210ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/cli/commands/changelog.py000066400000000000000000000123411475670435200273240ustar00rootroot00000000000000from __future__ import annotations import logging from contextlib import suppress from pathlib import Path from typing import TYPE_CHECKING import click import tomlkit from git import GitCommandError, Repo from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.cli.changelog_writer import ( generate_release_notes, write_changelog_files, ) from semantic_release.cli.util import noop_report from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase if TYPE_CHECKING: # pragma: no cover from semantic_release.cli.cli_context import CliContextObj log = logging.getLogger(__name__) def get_license_name_for_release(tag_name: str, project_root: Path) -> str: # Retrieve the license name at the time of the specific release tag project_metadata: dict[str, str] = {} curr_dir = Path.cwd().resolve() allowed_directories = [ dir_path for dir_path in [curr_dir, *curr_dir.parents] if str(project_root) in str(dir_path) ] for allowed_dir in 
allowed_directories: proj_toml = allowed_dir.joinpath("pyproject.toml") with Repo(project_root) as git_repo, suppress(GitCommandError): toml_contents = git_repo.git.show( f"{tag_name}:{proj_toml.relative_to(project_root)}" ) config_toml = tomlkit.parse(toml_contents) project_metadata = config_toml.unwrap().get("project", project_metadata) break license_cfg = project_metadata.get( "license-expression", project_metadata.get( "license", "", ), ) if not isinstance(license_cfg, (str, dict)) or license_cfg is None: return "" return ( license_cfg.get("text", "") # type: ignore[attr-defined] if isinstance(license_cfg, dict) else license_cfg or "" ) def post_release_notes( release_tag: str, release_notes: str, prerelease: bool, hvcs_client: RemoteHvcsBase, noop: bool = False, ) -> None: if noop: noop_report( str.join( "\n", [ f"would have posted the following release notes for tag {release_tag}:", # Escape square brackets to ensure all content is displayed in the console # (i.e. prevent interpretation of ansi escape sequences that is valid markdown) release_notes.replace("[", "\\["), ], ) ) return hvcs_client.create_or_update_release( release_tag, release_notes, prerelease=prerelease, ) @click.command( short_help="Generate a changelog", context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.option( "--post-to-release-tag", "release_tag", default=None, help="Post the generated release notes to the remote VCS's release for this tag", ) @click.pass_obj def changelog(cli_ctx: CliContextObj, release_tag: str | None) -> None: """Generate and optionally publish a changelog for your project""" ctx = click.get_current_context() runtime = cli_ctx.runtime_ctx translator = runtime.version_translator hvcs_client = runtime.hvcs_client with Repo(str(runtime.repo_dir)) as git_repo: release_history = ReleaseHistory.from_git_history( repo=git_repo, translator=translator, commit_parser=runtime.commit_parser, exclude_commit_patterns=runtime.changelog_excluded_commit_patterns, ) 
write_changelog_files( runtime_ctx=runtime, release_history=release_history, hvcs_client=hvcs_client, noop=runtime.global_cli_options.noop, ) if not release_tag: return if not isinstance(hvcs_client, RemoteHvcsBase): click.echo( "Remote does not support releases. Skipping release notes update...", err=True, ) return if not (version := translator.from_tag(release_tag)): click.echo( str.join( " ", [ f"Tag {release_tag!r} does not match the tag format", repr(translator.tag_format), ], ), err=True, ) ctx.exit(1) try: release = release_history.released[version] except KeyError: click.echo(f"tag {release_tag} not in release history", err=True) ctx.exit(2) release_notes = generate_release_notes( hvcs_client, release, runtime.template_dir, release_history, style=runtime.changelog_style, mask_initial_release=runtime.changelog_mask_initial_release, license_name=get_license_name_for_release( tag_name=release_tag, project_root=runtime.repo_dir, ), ) try: post_release_notes( release_tag=release_tag, release_notes=release_notes, prerelease=version.is_prerelease, hvcs_client=hvcs_client, noop=runtime.global_cli_options.noop, ) except Exception as e: log.exception(e) click.echo("Failed to post release notes to remote", err=True) ctx.exit(1) python-semantic-release-9.21.0/src/semantic_release/cli/commands/generate_config.py000066400000000000000000000032431475670435200305150ustar00rootroot00000000000000from __future__ import annotations import json import click import tomlkit from semantic_release.cli.config import RawConfig @click.command( short_help="Generate semantic-release's default configuration", context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.option( "-f", "--format", "fmt", type=click.Choice(["toml", "json"], case_sensitive=False), default="toml", help="format for the config to be generated", ) @click.option( "--pyproject", "is_pyproject_toml", is_flag=True, help=( "Add TOML configuration under 'tool.semantic_release' instead of " "'semantic_release'" 
), ) def generate_config(fmt: str = "toml", is_pyproject_toml: bool = False) -> None: """ Generate default configuration for semantic-release, to help you get started quickly. You can inspect the defaults, write to a file and then edit according to your needs. For example, to append the default configuration to your pyproject.toml file, you can use the following command: semantic-release generate-config --pyproject >> pyproject.toml """ # due to possible IntEnum values (which are not supported by tomlkit.dumps, see sdispater/tomlkit#237), # we must ensure the transformation of the model to a dict uses json serializable values config = RawConfig().model_dump(mode="json", exclude_none=True) config_dct = {"semantic_release": config} if is_pyproject_toml and fmt == "toml": config_dct = {"tool": config_dct} if fmt == "toml": click.echo(tomlkit.dumps(config_dct)) elif fmt == "json": click.echo(json.dumps(config_dct, indent=4)) python-semantic-release-9.21.0/src/semantic_release/cli/commands/main.py000066400000000000000000000102571475670435200263250ustar00rootroot00000000000000from __future__ import annotations import importlib import logging from enum import Enum # from typing import TYPE_CHECKING import click from rich.console import Console from rich.logging import RichHandler import semantic_release from semantic_release import globals from semantic_release.cli.cli_context import CliContextObj from semantic_release.cli.config import GlobalCommandLineOptions from semantic_release.cli.const import DEFAULT_CONFIG_FILE from semantic_release.cli.util import rprint from semantic_release.enums import SemanticReleaseLogLevels # if TYPE_CHECKING: # pass FORMAT = "[%(module)s.%(funcName)s] %(message)s" class Cli(click.MultiCommand): """Root MultiCommand for the semantic-release CLI""" class SubCmds(Enum): """Subcommand import definitions""" # SUBCMD_FUNCTION_NAME => MODULE_WITH_FUNCTION CHANGELOG = f"{__package__}.changelog" GENERATE_CONFIG = f"{__package__}.generate_config" 
VERSION = f"{__package__}.version" PUBLISH = f"{__package__}.publish" def list_commands(self, _ctx: click.Context) -> list[str]: # Used for shell-completion return [subcmd.lower().replace("_", "-") for subcmd in Cli.SubCmds.__members__] def get_command(self, _ctx: click.Context, name: str) -> click.Command | None: subcmd_name = name.lower().replace("-", "_") try: subcmd_def: Cli.SubCmds = Cli.SubCmds.__dict__[subcmd_name.upper()] module_path = subcmd_def.value subcmd_module = importlib.import_module(module_path) return getattr(subcmd_module, subcmd_name) except (KeyError, ModuleNotFoundError, AttributeError): return None @click.command( cls=Cli, context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.version_option( version=semantic_release.__version__, prog_name="semantic-release", help="Show the version of semantic-release and exit", ) @click.option( "-c", "--config", "config_file", default=DEFAULT_CONFIG_FILE, help="Specify a configuration file for semantic-release to use", type=click.Path(), ) @click.option("--noop", "noop", is_flag=True, help="Run semantic-release in no-op mode") @click.option( "-v", "--verbose", "verbosity", help="Set logging verbosity", default=0, count=True, show_default=True, type=click.IntRange(0, 2, clamp=True), ) @click.option( "--strict", "strict", is_flag=True, default=False, help="Enable strict mode", ) @click.pass_context def main( ctx: click.Context, config_file: str = DEFAULT_CONFIG_FILE, verbosity: int = 0, noop: bool = False, strict: bool = False, ) -> None: """ Python Semantic Release Automated Semantic Versioning based on version 2.0.0 of the Semantic Versioning specification, which can be found at https://semver.org/spec/v2.0.0.html. Detect the next semantically correct version for a project based on the Git history, create and publish a changelog to a remote VCS, build a project. 
For more information, visit https://python-semantic-release.readthedocs.io/ """ console = Console(stderr=True) log_levels = [ SemanticReleaseLogLevels.WARNING, SemanticReleaseLogLevels.INFO, SemanticReleaseLogLevels.DEBUG, SemanticReleaseLogLevels.SILLY, ] globals.log_level = log_levels[verbosity] logging.basicConfig( level=globals.log_level, format=FORMAT, datefmt="[%X]", handlers=[ RichHandler( console=console, rich_tracebacks=True, tracebacks_suppress=[click] ), ], ) logger = logging.getLogger(__name__) logger.debug("logging level set to: %s", logging.getLevelName(globals.log_level)) if noop: rprint( ":shield: [bold cyan]You are running in no-operation mode, because the " "'--noop' flag was supplied" ) cli_options = GlobalCommandLineOptions( noop=noop, verbosity=verbosity, config_file=config_file, strict=strict ) logger.debug("global cli options: %s", cli_options) ctx.obj = CliContextObj(ctx, logger, cli_options) python-semantic-release-9.21.0/src/semantic_release/cli/commands/publish.py000066400000000000000000000054731475670435200270530ustar00rootroot00000000000000from __future__ import annotations import logging from typing import TYPE_CHECKING import click from git import Repo from semantic_release.cli.util import noop_report from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.version.algorithm import tags_and_versions if TYPE_CHECKING: # pragma: no cover from semantic_release.cli.cli_context import CliContextObj log = logging.getLogger(__name__) def publish_distributions( tag: str, hvcs_client: RemoteHvcsBase, dist_glob_patterns: tuple[str, ...], noop: bool = False, ) -> None: if noop: noop_report( str.join( " ", [ "would have uploaded files matching any of the globs", str.join(", ", [repr(g) for g in dist_glob_patterns]), "to a remote VCS release, if supported", ], ) ) return log.info("Uploading distributions to release") for pattern in dist_glob_patterns: hvcs_client.upload_dists(tag=tag, dist_glob=pattern) # type: 
ignore[attr-defined] @click.command( short_help="Publish distributions to VCS Releases", context_settings={ "help_option_names": ["-h", "--help"], }, ) @click.option( "--tag", "tag", help="The tag associated with the release to publish to", default="latest", ) @click.pass_obj def publish(cli_ctx: CliContextObj, tag: str) -> None: """Build and publish a distribution to a VCS release.""" ctx = click.get_current_context() runtime = cli_ctx.runtime_ctx hvcs_client = runtime.hvcs_client translator = runtime.version_translator dist_glob_patterns = runtime.dist_glob_patterns with Repo(str(runtime.repo_dir)) as git_repo: repo_tags = git_repo.tags if tag == "latest": try: tag = str(tags_and_versions(repo_tags, translator)[0][0]) except IndexError: click.echo( str.join( " ", [ "No tags found with format", repr(translator.tag_format), "couldn't identify latest version", ], ), err=True, ) ctx.exit(1) if tag not in {tag.name for tag in repo_tags}: click.echo(f"Tag '{tag}' not found in local repository!", err=True) ctx.exit(1) if not isinstance(hvcs_client, RemoteHvcsBase): click.echo( "Remote does not support artifact upload. 
Exiting with no action taken...", err=True, ) return publish_distributions( tag=tag, hvcs_client=hvcs_client, dist_glob_patterns=dist_glob_patterns, noop=runtime.global_cli_options.noop, ) python-semantic-release-9.21.0/src/semantic_release/cli/commands/version.py000066400000000000000000000617521475670435200270740ustar00rootroot00000000000000from __future__ import annotations import logging import os import subprocess import sys from collections import defaultdict from datetime import datetime, timezone from typing import TYPE_CHECKING import click import shellingham # type: ignore[import] from click_option_group import MutuallyExclusiveOptionGroup, optgroup from git import Repo from requests import HTTPError from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.cli.changelog_writer import ( generate_release_notes, write_changelog_files, ) from semantic_release.cli.github_actions_output import VersionGitHubActionsOutput from semantic_release.cli.util import noop_report, rprint from semantic_release.const import DEFAULT_SHELL, DEFAULT_VERSION from semantic_release.enums import LevelBump from semantic_release.errors import ( BuildDistributionsError, GitCommitEmptyIndexError, InternalError, UnexpectedResponse, ) from semantic_release.gitproject import GitProject from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.version.algorithm import ( next_version, tags_and_versions, ) from semantic_release.version.translator import VersionTranslator if TYPE_CHECKING: # pragma: no cover from pathlib import Path from typing import Mapping, Sequence from git.refs.tag import Tag from semantic_release.cli.cli_context import CliContextObj from semantic_release.version.declaration import IVersionReplacer from semantic_release.version.version import Version log = logging.getLogger(__name__) def is_forced_prerelease( as_prerelease: bool, forced_level_bump: LevelBump | None, prerelease: bool ) -> bool: """ Determine 
if this release is forced to have prerelease on/off. If ``force_prerelease`` is set then yes. Otherwise if we are forcing a specific level bump without force_prerelease, it's False. Otherwise (``force_level is None``) use the value of ``prerelease`` """ local_vars = list(locals().items()) log.debug( "%s: %s", is_forced_prerelease.__name__, str.join(", ", iter(f"{k} = {v}" for k, v in local_vars)), ) return ( as_prerelease or forced_level_bump is LevelBump.PRERELEASE_REVISION or ((forced_level_bump is None) and prerelease) ) def last_released(repo_dir: Path, tag_format: str) -> tuple[Tag, Version] | None: with Repo(str(repo_dir)) as git_repo: ts_and_vs = tags_and_versions( git_repo.tags, VersionTranslator(tag_format=tag_format) ) return ts_and_vs[0] if ts_and_vs else None def version_from_forced_level( repo_dir: Path, forced_level_bump: LevelBump, translator: VersionTranslator ) -> Version: with Repo(str(repo_dir)) as git_repo: ts_and_vs = tags_and_versions(git_repo.tags, translator) # If we have no tags, return the default version if not ts_and_vs: # Since the translator is configured by the user, we can't guarantee that it will # be able to parse the default version. So we first cast it to a tag using the default # value and the users configured tag format, then parse it back to a version object default_initial_version = translator.from_tag( translator.str_to_tag(DEFAULT_VERSION) ) if default_initial_version is None: # This should never happen, but if it does, it's a bug raise InternalError( "Translator was unable to parse the embedded default version" ) return default_initial_version.bump(forced_level_bump) _, latest_version = ts_and_vs[0] if forced_level_bump is not LevelBump.PRERELEASE_REVISION: return latest_version.bump(forced_level_bump) # We need to find the latest version with the prerelease token # we're looking for, and return that version + an increment to # the prerelease revision. # NOTE this can probably be cleaned up. 
# ts_and_vs are in order, so check if we're looking at prereleases # for the same (major, minor, patch) as the latest version. # If we are, we can increment the revision and we're done. If # we don't find a prerelease targeting this version with the same # token as the one we're looking to prerelease, we can use revision 1. for _, version in ts_and_vs: if not ( version.major == latest_version.major and version.minor == latest_version.minor and version.patch == latest_version.patch ): break if ( version.is_prerelease and version.prerelease_token == translator.prerelease_token ): return version.bump(LevelBump.PRERELEASE_REVISION) return latest_version.to_prerelease(token=translator.prerelease_token, revision=1) def apply_version_to_source_files( repo_dir: Path, version_declarations: Sequence[IVersionReplacer], version: Version, noop: bool = False, ) -> list[str]: if len(version_declarations) < 1: return [] if not noop: log.debug("Updating version %s in repository files...", version) paths = list( map( lambda decl, new_version=version, noop=noop: ( # type: ignore[misc] decl.update_file_w_version(new_version=new_version, noop=noop) ), version_declarations, ) ) repo_filepaths = [ str(updated_file.relative_to(repo_dir)) for updated_file in paths if updated_file is not None ] if noop: noop_report( str.join( "", [ "would have updated versions in the following paths:", *[f"\n {filepath}" for filepath in repo_filepaths], ], ) ) return repo_filepaths def shell( cmd: str, *, env: Mapping[str, str] | None = None, check: bool = True ) -> subprocess.CompletedProcess: shell: str | None try: shell, _ = shellingham.detect_shell() except shellingham.ShellDetectionFailure: log.warning("failed to detect shell, using default shell: %s", DEFAULT_SHELL) log.debug("stack trace", exc_info=True) shell = DEFAULT_SHELL if not shell: raise TypeError("'shell' is None") shell_cmd_param = defaultdict( lambda: "-c", { "cmd": "/c", "powershell": "-Command", "pwsh": "-Command", }, ) return 
subprocess.run( # noqa: S603 [shell, shell_cmd_param[shell], cmd], env=(env or {}), check=check, ) def is_windows() -> bool: return sys.platform == "win32" def get_windows_env() -> Mapping[str, str | None]: return { environment_variable: os.getenv(environment_variable, None) for environment_variable in ( "ALLUSERSAPPDATA", "ALLUSERSPROFILE", "APPDATA", "COMMONPROGRAMFILES", "COMMONPROGRAMFILES(X86)", "DEFAULTUSERPROFILE", "HOMEPATH", "PATHEXT", "PROFILESFOLDER", "PROGRAMFILES", "PROGRAMFILES(X86)", "SYSTEM", "SYSTEM16", "SYSTEM32", "SYSTEMDRIVE", "SYSTEMPROFILE", "SYSTEMROOT", "TEMP", "TMP", "USERPROFILE", "USERSID", "WINDIR", ) } def build_distributions( build_command: str | None, build_command_env: Mapping[str, str] | None = None, noop: bool = False, ) -> None: """ Run the build command to build the distributions. :param build_command: The build command to run. :param build_command_env: The environment variables to use when running the build command. :param noop: Whether or not to run the build command. 
:raises: BuildDistributionsError: if the build command fails """ if not build_command: rprint("[green]No build command specified, skipping") return if noop: noop_report(f"would have run the build_command {build_command}") return log.info("Running build command %s", build_command) rprint(f"[bold green]:hammer_and_wrench: Running build command: {build_command}") build_env_vars: dict[str, str] = dict( filter( lambda k_v: k_v[1] is not None, # type: ignore[arg-type] { # Common values "PATH": os.getenv("PATH", ""), "HOME": os.getenv("HOME", None), "VIRTUAL_ENV": os.getenv("VIRTUAL_ENV", None), # Windows environment variables **(get_windows_env() if is_windows() else {}), # affects build decisions "CI": os.getenv("CI", None), # Identifies which CI environment "GITHUB_ACTIONS": os.getenv("GITHUB_ACTIONS", None), "GITLAB_CI": os.getenv("GITLAB_CI", None), "GITEA_ACTIONS": os.getenv("GITEA_ACTIONS", None), "BITBUCKET_CI": ( str(True).lower() if os.getenv("BITBUCKET_REPO_FULL_NAME", None) else None ), "PSR_DOCKER_GITHUB_ACTION": os.getenv("PSR_DOCKER_GITHUB_ACTION", None), **(build_command_env or {}), }.items(), ) ) try: shell(build_command, env=build_env_vars, check=True) rprint("[bold green]Build completed successfully!") except subprocess.CalledProcessError as exc: log.exception(exc) log.error("Build command failed with exit code %s", exc.returncode) # noqa: TRY400 raise BuildDistributionsError from exc @click.command( short_help="Detect and apply a new version", context_settings={ "help_option_names": ["-h", "--help"], }, ) @optgroup.group("Print flags", cls=MutuallyExclusiveOptionGroup) @optgroup.option( "--print", "print_only", is_flag=True, help="Print the next version and exit" ) @optgroup.option( "--print-tag", "print_only_tag", is_flag=True, help="Print the next version tag and exit", ) @optgroup.option( "--print-last-released", is_flag=True, help="Print the last released version and exit", ) @optgroup.option( "--print-last-released-tag", is_flag=True, help="Print 
the last released version tag and exit", ) @click.option( "--as-prerelease", "as_prerelease", is_flag=True, help="Ensure the next version to be released is a prerelease version", ) @click.option( "--prerelease-token", "prerelease_token", default=None, help="Force the next version to use this prerelease token, if it is a prerelease", ) @click.option( "--major", "force_level", flag_value="major", help="Force the next version to be a major release", ) @click.option( "--minor", "force_level", flag_value="minor", help="Force the next version to be a minor release", ) @click.option( "--patch", "force_level", flag_value="patch", help="Force the next version to be a patch release", ) @click.option( "--prerelease", "force_level", flag_value="prerelease_revision", help="Force the next version to be a prerelease", ) @click.option( "--commit/--no-commit", "commit_changes", default=True, help="Whether or not to commit changes locally", ) @click.option( "--tag/--no-tag", "create_tag", default=True, help="Whether or not to create a tag for the new version", ) @click.option( "--changelog/--no-changelog", "update_changelog", default=True, help="Whether or not to update the changelog", ) @click.option( "--push/--no-push", "push_changes", default=True, help="Whether or not to push the new commit and tag to the remote", ) @click.option( "--vcs-release/--no-vcs-release", "make_vcs_release", default=True, help="Whether or not to create a release in the remote VCS, if supported", ) @click.option( "--build-metadata", "build_metadata", default=os.getenv("PSR_BUILD_METADATA"), help="Build metadata to append to the new version", ) @click.option( "--skip-build", "skip_build", default=False, is_flag=True, help="Skip building the current project", ) @click.pass_obj def version( # noqa: C901 cli_ctx: CliContextObj, print_only: bool, print_only_tag: bool, print_last_released: bool, print_last_released_tag: bool, as_prerelease: bool, prerelease_token: str | None, commit_changes: bool, create_tag: 
bool, update_changelog: bool, push_changes: bool, make_vcs_release: bool, build_metadata: str | None, skip_build: bool, force_level: str | None = None, ) -> None: """ Detect the semantically correct next version that should be applied to your project. By default: * Write this new version to the project metadata locations specified in the configuration file * Create a new commit with these locations and any other assets configured to be included in a release * Tag this commit according the configured format, with a tag that uniquely identifies the version being released. * Push the new tag and commit to the remote for the repository * Create a release (if supported) in the remote VCS for this tag """ ctx = click.get_current_context() # Enable any cli overrides of configuration before asking for the runtime context config = cli_ctx.raw_config # We can short circuit updating the release if we are only printing the last released version if print_last_released or print_last_released_tag: # TODO: get tag format a better way if not ( last_release := last_released(config.repo_dir, tag_format=config.tag_format) ): log.warning("No release tags found.") return click.echo(last_release[0] if print_last_released_tag else last_release[1]) return # TODO: figure out --print of next version with & without branch validation # do you always need a prerelease token if its not --as-prerelease? 
runtime = cli_ctx.runtime_ctx translator = runtime.version_translator parser = runtime.commit_parser hvcs_client = runtime.hvcs_client assets = runtime.assets commit_author = runtime.commit_author commit_message = runtime.commit_message major_on_zero = runtime.major_on_zero no_verify = runtime.no_git_verify opts = runtime.global_cli_options gha_output = VersionGitHubActionsOutput(released=False) forced_level_bump = None if not force_level else LevelBump.from_string(force_level) prerelease = is_forced_prerelease( as_prerelease=as_prerelease, forced_level_bump=forced_level_bump, prerelease=runtime.prerelease, ) if prerelease_token: log.info("Forcing use of %s as the prerelease token", prerelease_token) translator.prerelease_token = prerelease_token # Only push if we're committing changes if push_changes and not commit_changes and not create_tag: log.info("changes will not be pushed because --no-commit disables pushing") push_changes &= commit_changes # Only push if we're creating a tag if push_changes and not create_tag and not commit_changes: log.info("new tag will not be pushed because --no-tag disables pushing") push_changes &= create_tag # Only make a release if we're pushing the changes if make_vcs_release and not push_changes: log.info("No vcs release will be created because pushing changes is disabled") make_vcs_release &= push_changes if not forced_level_bump: with Repo(str(runtime.repo_dir)) as git_repo: new_version = next_version( repo=git_repo, translator=translator, commit_parser=parser, prerelease=prerelease, major_on_zero=major_on_zero, allow_zero_version=runtime.allow_zero_version, ) else: log.warning( "Forcing a '%s' release due to '--%s' command-line flag", force_level, ( force_level if forced_level_bump is not LevelBump.PRERELEASE_REVISION else "prerelease" ), ) new_version = version_from_forced_level( repo_dir=runtime.repo_dir, forced_level_bump=forced_level_bump, translator=translator, ) # We only turn the forced version into a prerelease if the 
user has specified # that that is what they want on the command-line; otherwise we assume they are # forcing a full release new_version = ( new_version.to_prerelease(token=translator.prerelease_token) if prerelease else new_version.finalize_version() ) if build_metadata: new_version.build_metadata = build_metadata # Update GitHub Actions output value with new version & set delayed write gha_output.version = new_version ctx.call_on_close(gha_output.write_if_possible) # Make string variant of version or appropriate tag as necessary version_to_print = str(new_version) if not print_only_tag else new_version.as_tag() # Print the new version so that command-line output capture will work click.echo(version_to_print) with Repo(str(runtime.repo_dir)) as git_repo: # TODO: performance improvement - cache the result of tags_and_versions (previously done in next_version()) previously_released_versions = { v for _, v in tags_and_versions(git_repo.tags, translator) } # If the new version has already been released, we fail and abort if strict; # otherwise we exit with 0. if new_version in previously_released_versions: err_msg = str.join( " ", [ "[bold orange1]No release will be made,", f"{new_version!s} has already been released!", ], ) if opts.strict: click.echo(err_msg, err=True) ctx.exit(2) rprint(err_msg) return if print_only or print_only_tag: return with Repo(str(runtime.repo_dir)) as git_repo: release_history = ReleaseHistory.from_git_history( repo=git_repo, translator=translator, commit_parser=parser, exclude_commit_patterns=runtime.changelog_excluded_commit_patterns, ) rprint(f"[bold green]The next version is: [white]{new_version!s}[/white]! 
:rocket:") commit_date = datetime.now(timezone.utc).astimezone() # Locale-aware timestamp try: # Create release object for the new version # This will be used to generate the changelog prior to the commit and/or tag release_history = release_history.release( new_version, tagger=commit_author, committer=commit_author, tagged_date=commit_date, ) except ValueError as ve: click.echo(str(ve), err=True) ctx.exit(1) all_paths_to_add: list[str] = [] if update_changelog: # Write changelog files & add them to the list of files to commit all_paths_to_add.extend( write_changelog_files( runtime_ctx=runtime, release_history=release_history, hvcs_client=hvcs_client, noop=opts.noop, ) ) # Apply the new version to the source files files_with_new_version_written = apply_version_to_source_files( repo_dir=runtime.repo_dir, version_declarations=runtime.version_declarations, version=new_version, noop=opts.noop, ) all_paths_to_add.extend(files_with_new_version_written) all_paths_to_add.extend(assets or []) # Build distributions before committing any changes - this way if the # build fails, modifications to the source code won't be committed if skip_build: rprint("[bold orange1]Skipping build due to --skip-build flag") else: try: build_distributions( build_command=runtime.build_command, build_command_env={ # User defined overrides of environment (from config) **runtime.build_command_env, # PSR injected environment variables "NEW_VERSION": str(new_version), }, noop=opts.noop, ) except BuildDistributionsError as exc: click.echo(str(exc), err=True) click.echo("Build failed, aborting release", err=True) ctx.exit(1) project = GitProject( directory=runtime.repo_dir, commit_author=runtime.commit_author, credential_masker=runtime.masker, ) # Preparing for committing changes if commit_changes: project.git_add(paths=all_paths_to_add, noop=opts.noop) # NOTE: If we haven't modified any source code then we skip trying to make a commit # and any tag that we apply will be to the HEAD commit (made 
outside of # running PSR try: project.git_commit( message=commit_message.format(version=new_version), date=int(commit_date.timestamp()), no_verify=no_verify, noop=opts.noop, ) except GitCommitEmptyIndexError: log.info("No local changes to add to any commit, skipping") # Tag the version after potentially creating a new HEAD commit. # This way if no source code is modified, i.e. all metadata updates # are disabled, and the changelog generation is disabled or it's not # modified, then the HEAD commit will be tagged as a release commit # despite not being made by PSR if commit_changes or create_tag: project.git_tag( tag_name=new_version.as_tag(), message=new_version.as_tag(), isotimestamp=commit_date.isoformat(), noop=opts.noop, ) if push_changes: remote_url = runtime.hvcs_client.remote_url( use_token=not runtime.ignore_token_for_push ) if commit_changes: # TODO: integrate into push branch with Repo(str(runtime.repo_dir)) as git_repo: active_branch = git_repo.active_branch.name project.git_push_branch( remote_url=remote_url, branch=active_branch, noop=opts.noop, ) if create_tag: # push specific tag refspec (that we made) to remote project.git_push_tag( remote_url=remote_url, tag=new_version.as_tag(), noop=opts.noop, ) # Update GitHub Actions output value now that release has occurred gha_output.released = True if not make_vcs_release: return if not isinstance(hvcs_client, RemoteHvcsBase): log.info("Remote does not support releases. 
Skipping release creation...") return license_cfg = runtime.project_metadata.get( "license-expression", runtime.project_metadata.get( "license", "", ), ) if not isinstance(license_cfg, (str, dict)) or license_cfg is None: license_cfg = "" license_name = ( license_cfg.get("text", "") if isinstance(license_cfg, dict) else license_cfg or "" ) release_notes = generate_release_notes( hvcs_client, release=release_history.released[new_version], template_dir=runtime.template_dir, history=release_history, style=runtime.changelog_style, mask_initial_release=runtime.changelog_mask_initial_release, license_name=license_name, ) exception: Exception | None = None help_message = "" try: hvcs_client.create_release( tag=new_version.as_tag(), release_notes=release_notes, prerelease=new_version.is_prerelease, assets=assets, noop=opts.noop, ) except HTTPError as err: exception = err except UnexpectedResponse as err: exception = err help_message = str.join( " ", [ "Before re-running, make sure to clean up any artifacts", "on the hvcs that may have already been created.", ], ) help_message = str.join( "\n", [ "Unexpected response from remote VCS!", help_message, ], ) except Exception as err: # noqa: BLE001 # TODO: Remove this catch-all exception handler in the future exception = err finally: if exception is not None: log.exception(exception) click.echo(str(exception), err=True) if help_message: click.echo(help_message, err=True) click.echo( f"Failed to create release on {hvcs_client.__class__.__name__}!", err=True, ) ctx.exit(1) python-semantic-release-9.21.0/src/semantic_release/cli/config.py000066400000000000000000001013541475670435200250440ustar00rootroot00000000000000from __future__ import annotations import logging import os from collections.abc import Mapping from dataclasses import dataclass, is_dataclass from enum import Enum from functools import reduce from pathlib import Path from re import ( Pattern, compile as regexp, error as RegExpError, # noqa: N812 escape as 
regex_escape, ) from typing import Any, ClassVar, Dict, List, Literal, Optional, Tuple, Type, Union # typing_extensions is for Python 3.8, 3.9, 3.10 compatibility import tomlkit from git import Actor, InvalidGitRepositoryError from git.repo.base import Repo from jinja2 import Environment from pydantic import ( BaseModel, Field, RootModel, ValidationError, field_validator, model_validator, ) from typing_extensions import Annotated, Self from urllib3.util.url import parse_url import semantic_release.hvcs as hvcs from semantic_release.changelog.context import ChangelogMode from semantic_release.changelog.template import environment from semantic_release.cli.const import DEFAULT_CONFIG_FILE from semantic_release.cli.masking_filter import MaskingFilter from semantic_release.commit_parser import ( AngularCommitParser, CommitParser, ConventionalCommitParser, EmojiCommitParser, ParseResult, ParserOptions, ScipyCommitParser, TagCommitParser, ) from semantic_release.const import COMMIT_MESSAGE, DEFAULT_COMMIT_AUTHOR from semantic_release.errors import ( DetachedHeadGitError, InvalidConfiguration, MissingGitRemote, NotAReleaseBranch, ParserLoadError, ) from semantic_release.helpers import dynamic_import from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.declarations.pattern import PatternVersionDeclaration from semantic_release.version.declarations.toml import TomlVersionDeclaration from semantic_release.version.translator import VersionTranslator log = logging.getLogger(__name__) NonEmptyString = Annotated[str, Field(..., min_length=1)] class HvcsClient(str, Enum): BITBUCKET = "bitbucket" GITHUB = "github" GITLAB = "gitlab" GITEA = "gitea" _known_commit_parsers: Dict[str, type[CommitParser]] = { "conventional": ConventionalCommitParser, "angular": AngularCommitParser, "emoji": EmojiCommitParser, "scipy": ScipyCommitParser, "tag": TagCommitParser, } _known_hvcs: Dict[HvcsClient, Type[hvcs.HvcsBase]] = { 
class EnvConfigVar(BaseModel):
    """A configuration value that is resolved from the process environment."""

    # Name of the primary environment variable to read.
    env: str
    # Literal fallback value used when neither variable is set (may stay None).
    default: Optional[str] = None
    # Name of a secondary environment variable to consult before `default`.
    default_env: Optional[str] = None

    def getvalue(self) -> Optional[str]:
        """Return ``$env`` if set, else ``$default_env`` if set, else ``default``.

        Note: when ``default_env`` is None/empty, the inner lookup uses the
        empty-string variable name, which never exists, so ``default`` wins.
        """
        return os.getenv(self.env, os.getenv(self.default_env or "", self.default))


# Either a literal string or an environment-variable indirection.
MaybeFromEnv = Union[EnvConfigVar, str]


class ChangelogOutputFormat(str, Enum):
    """Supported changelog output formats when using the default templates."""

    MARKDOWN = "md"
    RESTRUCTURED_TEXT = "rst"
    # Sentinel for "unspecified"; resolved later from the changelog file suffix.
    NONE = ""


class ChangelogEnvironmentConfig(BaseModel):
    """Template-engine settings for rendering changelogs.

    Field names mirror the keyword arguments forwarded to the Jinja2
    ``Environment`` construction (see the ``environment(...)`` call where
    this model is ``model_dump()``-ed).
    """

    block_start_string: str = "{%"
    block_end_string: str = "%}"
    variable_start_string: str = "{{"
    variable_end_string: str = "}}"
    comment_start_string: str = "{#"
    comment_end_string: str = "#}"
    line_statement_prefix: Optional[str] = None
    line_comment_prefix: Optional[str] = None
    trim_blocks: bool = False
    lstrip_blocks: bool = False
    newline_sequence: Literal["\n", "\r", "\r\n"] = "\n"
    keep_trailing_newline: bool = False
    extensions: Tuple[str, ...] = ()
    autoescape: Union[bool, str] = False
) except ValueError: self.output_format = ChangelogOutputFormat.MARKDOWN return self class ChangelogConfig(BaseModel): # TODO: BREAKING CHANGE v10, move to DefaultChangelogTemplatesConfig changelog_file: str = "" """Deprecated! Moved to 'default_templates.changelog_file'""" default_templates: DefaultChangelogTemplatesConfig = ( DefaultChangelogTemplatesConfig(output_format=ChangelogOutputFormat.NONE) ) environment: ChangelogEnvironmentConfig = ChangelogEnvironmentConfig() exclude_commit_patterns: Tuple[str, ...] = () mode: ChangelogMode = ChangelogMode.INIT insertion_flag: str = "" template_dir: str = "templates" @field_validator("exclude_commit_patterns", mode="after") @classmethod def validate_match(cls, patterns: Tuple[str, ...]) -> Tuple[str, ...]: curr_index = 0 try: for i, pattern in enumerate(patterns): curr_index = i regexp(pattern) except RegExpError as err: raise ValueError( f"exclude_commit_patterns[{curr_index}]: Invalid regular expression" ) from err return patterns @field_validator("changelog_file", mode="after") @classmethod def changelog_file_deprecation_warning(cls, val: str) -> str: log.warning( str.join( " ", [ "The 'changelog.changelog_file' configuration option is moving to 'changelog.default_templates.changelog_file'.", "Please update your configuration as the compatibility will break in v10.", ], ) ) return val @model_validator(mode="after") def move_changelog_file(self) -> Self: # TODO: Remove this method in v10 if not self.changelog_file: return self if self.changelog_file == self.default_templates.changelog_file: return self # Re-evaluate now that we are passing the changelog_file option down to default_templates # and only reset the output_format if it was not already set by the user self.default_templates = DefaultChangelogTemplatesConfig.model_validate( { **self.default_templates.model_dump(), "changelog_file": self.changelog_file, "output_format": ( self.default_templates.output_format if self.default_templates.output_format != 
class BranchConfig(BaseModel):
    """Per-release-group settings, selected by matching the active branch name."""

    # Regex matched against the active git branch name.
    match: str = "(main|master)"
    # Token inserted into prerelease version strings for this group.
    prerelease_token: str = "rc"  # noqa: S105
    # Whether releases from matching branches are prereleases.
    prerelease: bool = False

    @field_validator("match", mode="after")
    @classmethod
    def validate_match(cls, match: str) -> str:
        """Validate that ``match`` compiles as a regex; translate the ``*`` shorthand."""
        # Allow the special case of a plain wildcard although it's not a valid regex
        if match == "*":
            return ".*"

        try:
            regexp(match)
        except RegExpError as err:
            raise ValueError(f"Invalid regex {match!r}") from err
        return match


class RemoteConfig(BaseModel):
    """Connection settings for the remote hosted VCS (GitHub/GitLab/Gitea/Bitbucket)."""

    # Git remote name to resolve the repository URL from.
    name: str = "origin"
    # Auth token; may be given directly, via env indirection, or defaulted below.
    token: Optional[str] = None
    url: Optional[str] = None
    type: HvcsClient = HvcsClient.GITHUB
    domain: Optional[str] = None
    api_domain: Optional[str] = None
    # When True, pushes use the plain remote URL without embedding the token.
    ignore_token_for_push: bool = False
    # Must be explicitly enabled to permit plain-HTTP URLs (see check_insecure_flag).
    insecure: bool = False

    @field_validator("url", "domain", "api_domain", "token", mode="before")
    @classmethod
    def resolve_env_vars(cls, val: Any) -> str | None:
        """Resolve dict-shaped values as EnvConfigVar lookups; normalize empties to None."""
        ret_val = (
            val
            if not isinstance(val, dict)
            else (EnvConfigVar.model_validate(val).getvalue())
        )
        return ret_val or None

    @model_validator(mode="after")
    def set_default_token(self) -> Self:
        """Fill ``token`` from the VCS client's default env variable when unset."""
        # Set the default token name for the given VCS when no user input is given
        if self.token:
            return self

        if self.type not in _known_hvcs:
            return self

        if env_token := self._get_default_token():
            self.token = env_token

        return self

    def _get_default_token(self) -> str | None:
        """Read the token from the env var named by the client's DEFAULT_ENV_TOKEN_NAME."""
        hvcs_client_class = _known_hvcs[self.type]
        default_token_name = (
            getattr(hvcs_client_class, "DEFAULT_ENV_TOKEN_NAME")  # noqa: B009
            if hasattr(hvcs_client_class, "DEFAULT_ENV_TOKEN_NAME")
            else ""
        )
        return (
            EnvConfigVar(env=default_token_name).getvalue()
            if default_token_name
            else None
        )

    @model_validator(mode="after")
    def check_url_scheme(self) -> Self:
        """Apply the insecure-scheme policy to every URL-bearing field."""
        if self.url and isinstance(self.url, str):
            self.check_insecure_flag(self.url, "url")
        if self.domain and isinstance(self.domain, str):
            self.check_insecure_flag(self.domain, "domain")
        if self.api_domain and isinstance(self.api_domain, str):
            self.check_insecure_flag(self.api_domain, "api_domain")
        return self

    def check_insecure_flag(self, url_str: str, field_name: str) -> None:
        """Reject http:// URLs unless ``insecure`` is set; warn on a superfluous flag.

        Raises ValueError for a plain-HTTP URL when ``insecure`` is False.
        """
        if not url_str:
            return

        scheme = parse_url(url_str).scheme
        if scheme == "http" and not self.insecure:
            raise ValueError(
                str.join(
                    "\n",
                    [
                        "Insecure 'HTTP' URL detected and disabled by default.",
                        "Set the 'insecure' flag to 'True' to enable insecure connections.",
                    ],
                )
            )

        if scheme == "https" and self.insecure:
            # The flag is harmless here but indicates a config misunderstanding.
            log.warning(
                str.join(
                    "\n",
                    [
                        f"'{field_name}' starts with 'https://' but the 'insecure' flag is set.",
                        "This flag is only necessary for 'http://' URLs.",
                    ],
                )
            )
= ("dist/*",) upload_to_vcs_release: bool = True class RawConfig(BaseModel): assets: List[str] = [] branches: Dict[str, BranchConfig] = {"main": BranchConfig()} build_command: Optional[str] = None build_command_env: List[str] = [] changelog: ChangelogConfig = ChangelogConfig() commit_author: MaybeFromEnv = EnvConfigVar( env="GIT_COMMIT_AUTHOR", default=DEFAULT_COMMIT_AUTHOR ) commit_message: str = COMMIT_MESSAGE commit_parser: NonEmptyString = "conventional" # It's up to the parser_options() method to validate these commit_parser_options: Dict[str, Any] = {} logging_use_named_masks: bool = False major_on_zero: bool = True allow_zero_version: bool = True repo_dir: Annotated[Path, Field(validate_default=True)] = Path(".") remote: RemoteConfig = RemoteConfig() no_git_verify: bool = False tag_format: str = "v{version}" publish: PublishConfig = PublishConfig() version_toml: Optional[Tuple[str, ...]] = None version_variables: Optional[Tuple[str, ...]] = None @field_validator("repo_dir", mode="before") @classmethod def convert_str_to_path(cls, value: Any) -> Path: if not isinstance(value, (str, Path)): raise TypeError(f"Invalid type: {type(value)}, expected str or Path.") return Path(value) @field_validator("repo_dir", mode="after") @classmethod def verify_git_repo_dir(cls, dir_path: Path) -> Path: try: # Check for repository & walk up parent directories with Repo(str(dir_path), search_parent_directories=True) as git_repo: found_path = ( Path(git_repo.working_tree_dir or git_repo.working_dir) .expanduser() .absolute() ) except InvalidGitRepositoryError as err: raise InvalidGitRepositoryError("No valid git repository found!") from err if dir_path.absolute() != found_path: logging.warning( "Found .git/ in higher parent directory rather than provided in configuration." 
) return found_path.resolve() @field_validator("commit_parser", mode="after") @classmethod def tag_commit_parser_deprecation_warning(cls, val: str) -> str: if val == "tag": log.warning( str.join( " ", [ "The legacy 'tag' parser is deprecated and will be removed in v11.", "Recommend swapping to our emoji parser (higher-compatibility)", "or switch to another supported parser.", ], ) ) return val @field_validator("commit_parser", mode="after") @classmethod def angular_commit_parser_deprecation_warning(cls, val: str) -> str: if val == "angular": log.warning( str.join( " ", [ "The 'angular' parser is deprecated and will be removed in v11.", "The Angular parser is being renamed to the conventional commit parser,", "which is selected by switching the 'commit_parser' value to 'conventional'.", ], ) ) return val @field_validator("build_command_env", mode="after") @classmethod def remove_whitespace(cls, val: list[str]) -> list[str]: return [entry.strip() for entry in val] @model_validator(mode="after") def set_default_opts(self) -> Self: # Set the default parser options for the given commit parser when no user input is given if not self.commit_parser_options and self.commit_parser: parser_opts_type = None # If the commit parser is a known one, pull the default options object from it if self.commit_parser in _known_commit_parsers: # TODO: BREAKING CHANGE v10 # parser_opts_type = ( # _known_commit_parsers[self.commit_parser] # .get_default_options() # .__class__ # ) parser_opts_type = _known_commit_parsers[ self.commit_parser ].parser_options else: try: # if its a custom parser, try to import it and pull the default options object type custom_class = dynamic_import(self.commit_parser) # TODO: BREAKING CHANGE v10 # parser_opts_type = custom_class.get_default_options().__class__ if hasattr(custom_class, "parser_options"): parser_opts_type = custom_class.parser_options except ModuleNotFoundError as err: raise ParserLoadError( str.join( "\n", [ str(err), "Unable to import your 
def _recursive_getattr(obj: Any, path: str) -> Any:
    """Resolve a dotted attribute ``path`` (e.g. ``"hvcs_client.token"``) on ``obj``.

    Used to find nested parts of RuntimeContext which might contain
    sensitive data. Returns None as soon as any attribute along the
    path is missing (subsequent lookups then also yield None).
    """
    return reduce(
        lambda current, attr_name: getattr(current, attr_name, None),
        path.split("."),
        obj,
    )
upload_to_vcs_release: bool global_cli_options: GlobalCommandLineOptions # This way the filter can be passed around if needed, so that another function # can accept the filter as an argument and call masker: MaskingFilter @staticmethod def resolve_from_env(param: Optional[MaybeFromEnv]) -> Optional[str]: if isinstance(param, EnvConfigVar): return param.getvalue() return param @staticmethod def select_branch_options( choices: Dict[str, BranchConfig], active_branch: str ) -> BranchConfig: for group, options in choices.items(): if regexp(options.match).match(active_branch): log.info( "Using group %r options, as %r matches %r", group, options.match, active_branch, ) return options log.debug( "Rejecting group %r as %r doesn't match %r", group, options.match, active_branch, ) raise NotAReleaseBranch( f"branch {active_branch!r} isn't in any release groups; " "no release will be made" ) def apply_log_masking(self, masker: MaskingFilter) -> MaskingFilter: for attr in self._mask_attrs_: masker.add_mask_for(str(_recursive_getattr(self, attr)), f"context.{attr}") masker.add_mask_for(repr(_recursive_getattr(self, attr)), f"context.{attr}") return masker @classmethod def from_raw_config( # noqa: C901 cls, raw: RawConfig, global_cli_options: GlobalCommandLineOptions ) -> RuntimeContext: ## # credentials masking for logging masker = MaskingFilter(_use_named_masks=raw.logging_use_named_masks) # TODO: move to config if we change how the generated config is constructed # Retrieve project metadata from pyproject.toml project_metadata: dict[str, str] = {} curr_dir = Path.cwd().resolve() allowed_directories = [ dir_path for dir_path in [curr_dir, *curr_dir.parents] if str(raw.repo_dir) in str(dir_path) ] for allowed_dir in allowed_directories: if (proj_toml := allowed_dir.joinpath("pyproject.toml")).exists(): config_toml = tomlkit.parse(proj_toml.read_text()) project_metadata = config_toml.unwrap().get("project", project_metadata) break # Retrieve details from repository with 
Repo(str(raw.repo_dir)) as git_repo: try: # Get the remote url by calling out to `git remote get-url`. This returns # the expanded url, taking into account any insteadOf directives # in the git configuration. remote_url = raw.remote.url or git_repo.git.remote( "get-url", raw.remote.name ) active_branch = git_repo.active_branch.name except ValueError as err: raise MissingGitRemote( f"Unable to locate remote named '{raw.remote.name}'." ) from err except TypeError as err: raise DetachedHeadGitError( "Detached HEAD state cannot match any release groups; " "no release will be made" ) from err # branch-specific configuration branch_config = cls.select_branch_options(raw.branches, active_branch) # commit_parser try: commit_parser_cls = ( _known_commit_parsers[raw.commit_parser] if raw.commit_parser in _known_commit_parsers else dynamic_import(raw.commit_parser) ) except ValueError as err: raise ParserLoadError( str.join( "\n", [ f"Unrecognized commit parser value: {raw.commit_parser!r}.", str(err), "Unable to load the given parser! Check your configuration!", ], ) ) from err except ModuleNotFoundError as err: raise ParserLoadError( str.join( "\n", [ str(err), "Unable to import your custom parser! 
Check your configuration!", ], ) ) from err except AttributeError as err: raise ParserLoadError( str.join( "\n", [ str(err), "Unable to find the parser class inside the given module", ], ) ) from err commit_parser_opts_class = commit_parser_cls.parser_options # TODO: Breaking change v10 # commit_parser_opts_class = commit_parser_cls.get_default_options().__class__ try: commit_parser = commit_parser_cls( options=commit_parser_opts_class(**raw.commit_parser_options) ) except TypeError as err: raise ParserLoadError( str.join("\n", [str(err), f"Failed to initialize {raw.commit_parser}"]) ) from err # We always exclude PSR's own release commits from the Changelog # when parsing commits psr_release_commit_regex = regexp( reduce( lambda regex_str, pattern: str(regex_str).replace(*pattern), ( # replace the version holder with a regex pattern to match various versions (regex_escape("{version}"), r"(?P\d+\.\d+\.\d+\S*)"), # TODO: add any other placeholders here ), # We use re.escape to ensure that the commit message is treated as a literal regex_escape(raw.commit_message), ) ) changelog_excluded_commit_patterns = ( psr_release_commit_regex, *(regexp(pattern) for pattern in raw.changelog.exclude_commit_patterns), ) _commit_author_str = cls.resolve_from_env(raw.commit_author) or "" _commit_author_valid = Actor.name_email_regex.match(_commit_author_str) if not _commit_author_valid: raise ValueError( f"Invalid git author: {_commit_author_str} " f"should match {Actor.name_email_regex}" ) commit_author = Actor(*_commit_author_valid.groups()) version_declarations: list[IVersionReplacer] = [] try: version_declarations.extend( TomlVersionDeclaration.from_string_definition(definition) for definition in iter(raw.version_toml or ()) ) except ValueError as err: raise InvalidConfiguration( str.join( "\n", [ "Invalid 'version_toml' configuration", str(err), ], ) ) from err try: version_declarations.extend( PatternVersionDeclaration.from_string_definition( definition, raw.tag_format ) for 
definition in iter(raw.version_variables or ()) ) except ValueError as err: raise InvalidConfiguration( str.join( "\n", [ "Invalid 'version_variables' configuration", str(err), ], ) ) from err # Provide warnings if the token is missing if not raw.remote.token: log.debug("hvcs token is not set") if not raw.remote.ignore_token_for_push: log.warning("Token value is missing!") # hvcs_client hvcs_client_cls = _known_hvcs[raw.remote.type] hvcs_client = hvcs_client_cls( remote_url=remote_url, hvcs_domain=raw.remote.domain, hvcs_api_domain=raw.remote.api_domain, token=raw.remote.token, allow_insecure=raw.remote.insecure, ) # changelog_file # Must use absolute after resolve because windows does not resolve if the path does not exist # which means it returns a relative path. So we force absolute to ensure path is complete # for the next check of path matching changelog_file = ( Path(raw.changelog.default_templates.changelog_file) .expanduser() .resolve() .absolute() ) # Prevent path traversal attacks if raw.repo_dir not in changelog_file.parents: raise InvalidConfiguration( "Changelog file destination must be inside of the repository directory." ) # Must use absolute after resolve because windows does not resolve if the path does not exist # which means it returns a relative path. So we force absolute to ensure path is complete # for the next check of path matching template_dir = ( Path(raw.changelog.template_dir).expanduser().resolve().absolute() ) # Prevent path traversal attacks if raw.repo_dir not in template_dir.parents: raise InvalidConfiguration( "Template directory must be inside of the repository directory." 
) template_environment = environment( template_dir=template_dir, **raw.changelog.environment.model_dump(), ) # version_translator version_translator = VersionTranslator( tag_format=raw.tag_format, prerelease_token=branch_config.prerelease_token ) build_cmd_env = {} for i, env_var_def in enumerate(raw.build_command_env): # creative hack to handle, missing =, but also = that then can be unpacked # as the resulting parts array can be either 2 or 3 in length. it becomes 3 # with our forced empty value at the end which can be dropped parts = [*env_var_def.split("=", 1), ""] # removes any odd spacing around =, and extracts name=value name, env_val = (part.strip() for part in parts[:2]) if not name: # Skip when invalid format (ex. starting with = and no name) logging.warning( "Skipping invalid build_command_env[%s] definition", i, ) continue if not env_val and env_var_def[-1] != "=": # avoid the edge case that user wants to define a value as empty # and don't autoresolve it env_val = os.getenv(name, "") build_cmd_env[name] = env_val # TODO: better support for custom parsers that actually just extend defaults # # Here we just assume the desired changelog style matches the parser name # as we provide templates specific to each parser type. 
Unfortunately if the user has # provided a custom parser, it would be up to the user to provide custom templates # but we just assume the base template is angular # changelog_style = ( # raw.commit_parser # if raw.commit_parser in _known_commit_parsers # else "angular" # ) self = cls( project_metadata=project_metadata, repo_dir=raw.repo_dir, commit_parser=commit_parser, version_translator=version_translator, major_on_zero=raw.major_on_zero, allow_zero_version=raw.allow_zero_version, build_command=raw.build_command, build_command_env=build_cmd_env, version_declarations=tuple(version_declarations), hvcs_client=hvcs_client, changelog_file=changelog_file, changelog_mode=raw.changelog.mode, changelog_mask_initial_release=raw.changelog.default_templates.mask_initial_release, changelog_insertion_flag=raw.changelog.insertion_flag, assets=raw.assets, commit_author=commit_author, commit_message=raw.commit_message, changelog_excluded_commit_patterns=changelog_excluded_commit_patterns, # TODO: change when we have other styles per parser # changelog_style=changelog_style, # TODO: Breaking Change v10, change to conventional changelog_style="angular", changelog_output_format=raw.changelog.default_templates.output_format, prerelease=branch_config.prerelease, ignore_token_for_push=raw.remote.ignore_token_for_push, template_dir=template_dir, template_environment=template_environment, dist_glob_patterns=raw.publish.dist_glob_patterns, upload_to_vcs_release=raw.publish.upload_to_vcs_release, global_cli_options=global_cli_options, masker=masker, no_git_verify=raw.no_git_verify, ) # credential masker self.apply_log_masking(self.masker) return self python-semantic-release-9.21.0/src/semantic_release/cli/const.py000066400000000000000000000002431475670435200247200ustar00rootroot00000000000000DEFAULT_CONFIG_FILE = "pyproject.toml" DEFAULT_RELEASE_NOTES_TPL_FILE = ".release_notes.md.j2" DEFAULT_CHANGELOG_NAME_STEM = "CHANGELOG" JINJA2_EXTENSION = ".j2" 
python-semantic-release-9.21.0/src/semantic_release/cli/github_actions_output.py000066400000000000000000000043701475670435200302210ustar00rootroot00000000000000from __future__ import annotations import logging import os from semantic_release.version.version import Version log = logging.getLogger(__name__) class VersionGitHubActionsOutput: OUTPUT_ENV_VAR = "GITHUB_OUTPUT" def __init__( self, released: bool | None = None, version: Version | None = None, ) -> None: self._released = released self._version = version @property def released(self) -> bool | None: return self._released @released.setter def released(self, value: bool) -> None: if type(value) is not bool: raise TypeError("output 'released' is boolean") self._released = value @property def version(self) -> Version | None: return self._version if self._version is not None else None @version.setter def version(self, value: Version) -> None: if type(value) is not Version: raise TypeError("output 'released' should be a Version") self._version = value @property def tag(self) -> str | None: return self.version.as_tag() if self.version is not None else None @property def is_prerelease(self) -> bool | None: return self.version.is_prerelease if self.version is not None else None def to_output_text(self) -> str: missing = set() if self.version is None: missing.add("version") if self.released is None: missing.add("released") if missing: raise ValueError( f"some required outputs were not set: {', '.join(missing)}" ) outputs = { "released": str(self.released).lower(), "version": str(self.version), "tag": self.tag, "is_prerelease": str(self.is_prerelease).lower(), } return str.join("", [f"{key}={value!s}\n" for key, value in outputs.items()]) def write_if_possible(self, filename: str | None = None) -> None: output_file = filename or os.getenv(self.OUTPUT_ENV_VAR) if not output_file: log.info("not writing GitHub Actions output, as no file specified") return with open(output_file, "a", encoding="utf-8") as f: 
f.write(self.to_output_text()) python-semantic-release-9.21.0/src/semantic_release/cli/masking_filter.py000066400000000000000000000060001475670435200265650ustar00rootroot00000000000000from __future__ import annotations import logging import re from collections import defaultdict from typing import Iterable log = logging.getLogger(__name__) # https://relaxdiego.com/2014/07/logging-in-python.html # Updated/adapted for Python3 class MaskingFilter(logging.Filter): REPLACE_STR = "*" * 4 _UNWANTED = frozenset([s for obj in ("", None) for s in (repr(obj), str(obj))]) def __init__( self, _use_named_masks: bool = False, **patterns: Iterable[str | re.Pattern[str]], ) -> None: super().__init__() self._redact_patterns = defaultdict(set) for k, vs in patterns.items(): self._redact_patterns[k] = {v for v in vs if v and v not in self._UNWANTED} self._use_named_masks = _use_named_masks def add_mask_for(self, data: str, name: str = "redacted") -> MaskingFilter: if data and data not in self._UNWANTED: log.debug("Adding redact pattern '%r' to redact_patterns", name) self._redact_patterns[name].add(data) return self def filter(self, record: logging.LogRecord) -> bool: # Note if we blindly mask all types, we will actually cast arguments to # log functions from external libraries to strings before they are # formatted into the message - for example, a dependency calling # log.debug("%d", 15) will raise a TypeError as this filter would # otherwise convert 15 to "15", and "%d" % "15" raises the error. # One may find a specific example of where this issue could manifest itself # here: https://github.com/urllib3/urllib3/blob/a5b29ac1025f9bb30f2c9b756f3b171389c2c039/src/urllib3/connectionpool.py#L1003 # Anything which could reasonably be expected to be logged without being # cast to a string should be excluded from the cast here. 
record.msg = self.mask(record.msg) if record.args is None: pass elif isinstance(record.args, dict): record.args = { k: v if type(v) in (bool, int, float) else self.mask(str(v)) for k, v in record.args.items() } else: record.args = tuple( arg if type(arg) in (bool, int, float) else self.mask(str(arg)) for arg in record.args ) return True def mask(self, msg: str) -> str: if not isinstance(msg, str): log.debug( # type: ignore[unreachable] "cannot mask object of type %s", type(msg) ) return msg for mask, values in self._redact_patterns.items(): repl_string = ( self.REPLACE_STR if not self._use_named_masks else f"<{mask!r} (value removed)>" ) for data in values: if isinstance(data, str): msg = msg.replace(data, repl_string) elif isinstance(data, re.Pattern): msg = data.sub(repl_string, msg) return msg python-semantic-release-9.21.0/src/semantic_release/cli/util.py000066400000000000000000000072101475670435200245500ustar00rootroot00000000000000"""Utilities for command-line functionality""" from __future__ import annotations import json import logging import sys from pathlib import Path from textwrap import dedent, indent from typing import Any import rich import tomlkit from tomlkit.exceptions import TOMLKitError from semantic_release.errors import InvalidConfiguration log = logging.getLogger(__name__) def rprint(msg: str) -> None: """Rich-prints to stderr so that redirection of command output isn't cluttered""" rich.print(msg, file=sys.stderr) def noop_report(msg: str) -> None: """ Rich-prints a msg with a standard prefix to report when an action is not being taken due to a "noop" flag """ fullmsg = "[bold cyan][:shield: NOP] " + msg rprint(fullmsg) def indented(msg: str, prefix: str = " " * 4) -> str: """ Convenience function for text-formatting for the console. 
Ensures the least indented line of the msg string is indented by ``prefix`` with consistent alignment of the remainder of ``msg`` irrespective of the level of indentation in the Python source code """ return indent(dedent(msg), prefix=prefix) def parse_toml(raw_text: str) -> dict[Any, Any]: """ Attempts to parse raw configuration for semantic_release using tomlkit.loads, raising InvalidConfiguration if the TOML is invalid or there's no top level "semantic_release" or "tool.semantic_release" keys """ try: toml_text = tomlkit.loads(raw_text).unwrap() except TOMLKitError as exc: raise InvalidConfiguration(str(exc)) from exc # Look for [tool.semantic_release] cfg_text = toml_text.get("tool", {}).get("semantic_release") if cfg_text is not None: return cfg_text # Look for [semantic_release] or return {} if not found return toml_text.get("semantic_release", {}) def load_raw_config_file(config_file: Path | str) -> dict[Any, Any]: """ Load raw configuration as a dict from the filename specified by config_filename, trying the following parsing methods: 1. try to parse with tomli.load (guessing it's a TOML file) 2. try to parse with json.load (guessing it's a JSON file) 3. 
raise InvalidConfiguration if none of the above parsing methods work This function will also raise FileNotFoundError if it is raised while trying to read the specified configuration file """ log.info("Loading configuration from %s", config_file) raw_text = (Path() / config_file).resolve().read_text(encoding="utf-8") try: log.debug("Trying to parse configuration %s in TOML format", config_file) return parse_toml(raw_text) except InvalidConfiguration as e: log.debug("Configuration %s is invalid TOML: %s", config_file, str(e)) log.debug("trying to parse %s as JSON", config_file) try: # could be a "parse_json" function but it's a one-liner here return json.loads(raw_text)["semantic_release"] except KeyError: # valid configuration, but no "semantic_release" or "tool.semantic_release" # top level key log.debug( "configuration has no 'semantic_release' or 'tool.semantic_release' " "top-level key" ) return {} except json.JSONDecodeError as jde: raise InvalidConfiguration( dedent( f""" None of the supported configuration parsers were able to parse the configuration file {config_file}: * TOML: {e!s} * JSON: {jde!s} """ ) ) from jde python-semantic-release-9.21.0/src/semantic_release/commit_parser/000077500000000000000000000000001475670435200253165ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/commit_parser/__init__.py000066400000000000000000000013461475670435200274330ustar00rootroot00000000000000from semantic_release.commit_parser._base import ( CommitParser, ParserOptions, ) from semantic_release.commit_parser.angular import ( AngularCommitParser, AngularParserOptions, ) from semantic_release.commit_parser.conventional import ( ConventionalCommitParser, ConventionalCommitParserOptions, ) from semantic_release.commit_parser.emoji import ( EmojiCommitParser, EmojiParserOptions, ) from semantic_release.commit_parser.scipy import ( ScipyCommitParser, ScipyParserOptions, ) from semantic_release.commit_parser.tag import ( TagCommitParser, 
TagParserOptions, ) from semantic_release.commit_parser.token import ( ParsedCommit, ParseError, ParseResult, ParseResultType, ) python-semantic-release-9.21.0/src/semantic_release/commit_parser/_base.py000066400000000000000000000057621475670435200267530ustar00rootroot00000000000000from __future__ import annotations from abc import ABC, abstractmethod from typing import TYPE_CHECKING, Any, Generic, TypeVar from semantic_release.commit_parser.token import ParseResultType if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit class ParserOptions(dict): """ ParserOptions should accept the keyword arguments they are interested in from configuration and process them as desired, ultimately creating attributes on an instance which can be accessed by the corresponding commit parser. For example: >>> class MyParserOptions(ParserOptions): ... def __init__(self, message_prefix: str) -> None: ... self.prefix = message_prefix * 2 >>> class MyCommitParser(AbstractCommitParser): ... parser_options = MyParserOptions ... ... def parse(self, Commit): ... print(self.options.prefix) ... ... Any defaults that need to be set should also be done in this class too. Invalid options should be signalled by raising an ``InvalidOptionsException`` within the ``__init__`` method of the options class. A dataclass is also well suited to this; if type-checking of input is desired, a ``pydantic.dataclasses.dataclass`` works well and is used internally by python-semantic-release. Parser options are not validated in the configuration and passed directly to the appropriate class to handle. """ def __init__(self, **_: Any) -> None: pass # TT = TokenType, a subclass of ParsedCommit _TT = TypeVar("_TT", bound=ParseResultType) _OPTS = TypeVar("_OPTS", bound=ParserOptions) class CommitParser(ABC, Generic[_TT, _OPTS]): """ Abstract base class for all commit parsers. Custom commit parsers should inherit from this class. 
A class-level ``parser_options`` attribute should be set to a subclass of ``BaseParserOptions``; this will be used to provide the default options to the parser. Note that a nested class can be used directly, if preferred: >>> class MyParser(CommitParser): @dataclass class parser_options(ParserOptions): allowed_types: Tuple[str] = ("feat", "fix", "docs") major_types: Tuple[str] = ("breaking",) minor_types: Tuple[str] = ("fix", "patch") ... def __init__(self, options: parser_options) -> None: ... """ # TODO: Deprecate in lieu of get_default_options() parser_options: type[ParserOptions] = ParserOptions def __init__(self, options: _OPTS | None = None) -> None: self.options: _OPTS = ( options if options is not None else self.get_default_options() ) # TODO: BREAKING CHANGE v10, add abstract method for all custom parsers # @staticmethod # @abstractmethod def get_default_options(self) -> _OPTS: return self.parser_options() # type: ignore[return-value] @abstractmethod def parse(self, commit: Commit) -> _TT | list[_TT]: ... 
python-semantic-release-9.21.0/src/semantic_release/commit_parser/angular.py000066400000000000000000000454731475670435200273360ustar00rootroot00000000000000""" Angular commit style parser https://github.com/angular/angular/blob/master/CONTRIBUTING.md#-commit-message-guidelines """ from __future__ import annotations import logging import re from functools import reduce from itertools import zip_longest from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING, Tuple from git.objects.commit import Commit from pydantic.dataclasses import dataclass from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.commit_parser.util import ( breaking_re, deep_copy_commit, force_str, parse_paragraphs, ) from semantic_release.enums import LevelBump from semantic_release.errors import InvalidParserOptions from semantic_release.helpers import sort_numerically, text_reducer if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit logger = logging.getLogger(__name__) def _logged_parse_error(commit: Commit, error: str) -> ParseError: logger.debug(error) return ParseError(commit, error=error) # TODO: Remove from here, allow for user customization instead via options # types with long names in changelog LONG_TYPE_NAMES = { "build": "build system", "ci": "continuous integration", "chore": "chores", "docs": "documentation", "feat": "features", "fix": "bug fixes", "perf": "performance improvements", "refactor": "refactoring", "style": "code style", "test": "testing", } @dataclass class AngularParserOptions(ParserOptions): """Options dataclass for AngularCommitParser""" minor_tags: Tuple[str, ...] = ("feat",) """Commit-type prefixes that should result in a minor release bump.""" patch_tags: Tuple[str, ...] 
= ("fix", "perf") """Commit-type prefixes that should result in a patch release bump.""" other_allowed_tags: Tuple[str, ...] = ( "build", "chore", "ci", "docs", "style", "refactor", "test", ) """Commit-type prefixes that are allowed but do not result in a version bump.""" allowed_tags: Tuple[str, ...] = ( *minor_tags, *patch_tags, *other_allowed_tags, ) """ All commit-type prefixes that are allowed. These are used to identify a valid commit message. If a commit message does not start with one of these prefixes, it will not be considered a valid commit message. """ default_bump_level: LevelBump = LevelBump.NO_RELEASE """The minimum bump level to apply to valid commit message.""" # TODO: breaking change v10, change default to True parse_squash_commits: bool = False """Toggle flag for whether or not to parse squash commits""" # TODO: breaking change v10, change default to True ignore_merge_commits: bool = False """Toggle flag for whether or not to ignore merge commits""" @property def tag_to_level(self) -> dict[str, LevelBump]: """A mapping of commit tags to the level bump they should result in.""" return self._tag_to_level def __post_init__(self) -> None: self._tag_to_level: dict[str, LevelBump] = { str(tag): level for tag, level in [ # we have to do a type ignore as zip_longest provides a type that is not specific enough # for our expected output. Due to the empty second array, we know the first is always longest # and that means no values in the first entry of the tuples will ever be a LevelBump. We # apply a str() to make mypy happy although it will never happen. *zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), ] if "|" not in str(tag) } class AngularCommitParser(CommitParser[ParseResult, AngularParserOptions]): """ A commit parser for projects conforming to the angular style of conventional commits. 
See https://www.conventionalcommits.org/en/v1.0.0-beta.4/ """ # TODO: Deprecate in lieu of get_default_options() parser_options = AngularParserOptions def __init__(self, options: AngularParserOptions | None = None) -> None: super().__init__(options) try: commit_type_pattern = regexp( r"(?P%s)" % str.join("|", self.options.allowed_tags) ) except re.error as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured commit-types.", "Please check the configured commit-types and remove or escape any regular expression characters.", ], ) ) from err self.commit_prefix = regexp( str.join( "", [ f"^{commit_type_pattern.pattern}", r"(?:\((?P[^\n]+)\))?", # TODO: remove ! support as it is not part of the angular commit spec (its part of conventional commits spec) r"(?P!)?:\s+", ], ) ) self.re_parser = regexp( str.join( "", [ self.commit_prefix.pattern, r"(?P[^\n]+)", r"(?:\n\n(?P.+))?", # commit body ], ), flags=re.DOTALL, ) # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) self.mr_selector = regexp( r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$" ) self.issue_selector = regexp( str.join( "", [ r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", r"[\t ]+(?P.+)[\t ]*$", ], ), flags=re.MULTILINE | re.IGNORECASE, ) self.notice_selector = regexp(r"^NOTICE: (?P.+)$") self.filters = { "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), "git-header-commit": ( regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=re.MULTILINE), "", ), "git-header-author": ( regexp(r"^[\t ]*Author: .+$\n?", flags=re.MULTILINE), "", ), "git-header-date": ( regexp(r"^[\t ]*Date: .+$\n?", flags=re.MULTILINE), "", ), "git-squash-heading": ( regexp( r"^[\t ]*Squashed commit of the following:.*$\n?", flags=re.MULTILINE, ), "", ), "git-squash-commit-prefix": ( regexp( str.join( "", [ r"^(?:[\t ]*[*-][\t ]+|[\t ]+)?", # bullet points or 
indentation commit_type_pattern.pattern + r"\b", # prior to commit type ], ), flags=re.MULTILINE, ), # move commit type to the start of the line r"\1", ), } @staticmethod def get_default_options() -> AngularParserOptions: return AngularParserOptions() def commit_body_components_separator( self, accumulator: dict[str, list[str]], text: str ) -> dict[str, list[str]]: if (match := breaking_re.match(text)) and (brk_desc := match.group(1)): accumulator["breaking_descriptions"].append(brk_desc) # TODO: breaking change v10, removes breaking change footers from descriptions # return accumulator elif (match := self.notice_selector.match(text)) and ( notice := match.group("notice") ): accumulator["notices"].append(notice) # TODO: breaking change v10, removes notice footers from descriptions # return accumulator elif match := self.issue_selector.search(text): # if match := self.issue_selector.search(text): predicate = regexp(r",? and | *[,;/& ] *").sub( ",", match.group("issue_predicate") or "" ) # Almost all issue trackers use a number to reference an issue so # we use a simple regexp to validate the existence of a number which helps filter out # any non-issue references that don't fit our expected format has_number = regexp(r"\d+") new_issue_refs: set[str] = set( filter( lambda issue_str, validator=has_number: validator.search(issue_str), # type: ignore[arg-type] predicate.split(","), ) ) if new_issue_refs: accumulator["linked_issues"] = sort_numerically( set(accumulator["linked_issues"]).union(new_issue_refs) ) # TODO: breaking change v10, removes resolution footers from descriptions # return accumulator # Prevent appending duplicate descriptions if text not in accumulator["descriptions"]: accumulator["descriptions"].append(text) return accumulator def parse_message(self, message: str) -> ParsedMessageResult | None: if not (parsed := self.re_parser.match(message)): return None parsed_break = parsed.group("break") parsed_scope = parsed.group("scope") or "" parsed_subject = 
parsed.group("subject") parsed_text = parsed.group("text") parsed_type = parsed.group("type") linked_merge_request = "" if mr_match := self.mr_selector.search(parsed_subject): linked_merge_request = mr_match.group("mr_number") # TODO: breaking change v10, removes PR number from subject/descriptions # expects changelog template to format the line accordingly # parsed_subject = self.pr_selector.sub("", parsed_subject).strip() body_components: dict[str, list[str]] = reduce( self.commit_body_components_separator, [ # Insert the subject before the other paragraphs parsed_subject, *parse_paragraphs(parsed_text or ""), ], { "breaking_descriptions": [], "descriptions": [], "notices": [], "linked_issues": [], }, ) level_bump = ( LevelBump.MAJOR # TODO: remove parsed break support as it is not part of the angular commit spec (its part of conventional commits spec) if body_components["breaking_descriptions"] or parsed_break else self.options.tag_to_level.get( parsed_type, self.options.default_bump_level ) ) return ParsedMessageResult( bump=level_bump, type=parsed_type, category=LONG_TYPE_NAMES.get(parsed_type, parsed_type), scope=parsed_scope, descriptions=tuple(body_components["descriptions"]), breaking_descriptions=tuple(body_components["breaking_descriptions"]), release_notices=tuple(body_components["notices"]), linked_issues=tuple(body_components["linked_issues"]), linked_merge_request=linked_merge_request, ) @staticmethod def is_merge_commit(commit: Commit) -> bool: return len(commit.parents) > 1 def parse_commit(self, commit: Commit) -> ParseResult: if not (parsed_msg_result := self.parse_message(force_str(commit.message))): return _logged_parse_error( commit, f"Unable to parse commit message: {commit.message!r}", ) return ParsedCommit.from_parsed_message_result(commit, parsed_msg_result) # Maybe this can be cached as an optimization, similar to how # mypy/pytest use their own caching directories, for very large commit # histories? 
# The problem is the cache likely won't be present in CI environments def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: """ Parse a commit message If the commit message is a squashed merge commit, it will be split into multiple commits, each of which will be parsed separately. Single commits will be returned as a list of a single ParseResult. """ if self.options.ignore_merge_commits and self.is_merge_commit(commit): return _logged_parse_error( commit, "Ignoring merge commit: %s" % commit.hexsha[:8] ) separate_commits: list[Commit] = ( self.unsquash_commit(commit) if self.options.parse_squash_commits else [commit] ) # Parse each commit individually if there were more than one parsed_commits: list[ParseResult] = list( map(self.parse_commit, separate_commits) ) def add_linked_merge_request( parsed_result: ParseResult, mr_number: str ) -> ParseResult: return ( parsed_result if not isinstance(parsed_result, ParsedCommit) else ParsedCommit( **{ **parsed_result._asdict(), "linked_merge_request": mr_number, } ) ) # TODO: improve this for other VCS systems other than GitHub & BitBucket # Github works as the first commit in a squash merge commit has the PR number # appended to the first line of the commit message lead_commit = next(iter(parsed_commits)) if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: # If the first commit has linked merge requests, assume all commits # are part of the same PR and add the linked merge requests to all # parsed commits parsed_commits = [ lead_commit, *map( lambda parsed_result, mr=lead_commit.linked_merge_request: ( # type: ignore[misc] add_linked_merge_request(parsed_result, mr) ), parsed_commits[1:], ), ] elif isinstance(lead_commit, ParseError) and ( mr_match := self.mr_selector.search(force_str(lead_commit.message)) ): # Handle BitBucket Squash Merge Commits (see #1085), which have non angular commit # format but include the PR number in the commit subject that we want to extract 
linked_merge_request = mr_match.group("mr_number") # apply the linked MR to all commits parsed_commits = [ add_linked_merge_request(parsed_result, linked_merge_request) for parsed_result in parsed_commits ] return parsed_commits def unsquash_commit(self, commit: Commit) -> list[Commit]: # GitHub EXAMPLE: # feat(changelog): add autofit_text_width filter to template environment (#1062) # # This change adds an equivalent style formatter that can apply a text alignment # to a maximum width and also maintain an indent over paragraphs of text # # * docs(changelog-templates): add definition & usage of autofit_text_width template filter # # * test(changelog-context): add test cases to check autofit_text_width filter use # # `git merge --squash` EXAMPLE: # Squashed commit of the following: # # commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb # Author: codejedi365 # Date: Sun Oct 13 12:05:23 2024 -0600 # # feat(release-config): some commit subject # # Return a list of artificial commits (each with a single commit message) return [ # create a artificial commit object (copy of original but with modified message) Commit( **{ **deep_copy_commit(commit), "message": commit_msg, } ) for commit_msg in self.unsquash_commit_message(force_str(commit.message)) ] or [commit] def unsquash_commit_message(self, message: str) -> list[str]: normalized_message = message.replace("\r", "").strip() # split by obvious separate commits (applies to manual git squash merges) obvious_squashed_commits = self.filters["git-header-commit"][0].split( normalized_message ) separate_commit_msgs: list[str] = reduce( lambda all_msgs, msgs: all_msgs + msgs, map(self._find_squashed_commits_in_str, obvious_squashed_commits), [], ) return list(filter(None, separate_commit_msgs)) def _find_squashed_commits_in_str(self, message: str) -> list[str]: separate_commit_msgs: list[str] = [] current_msg = "" for paragraph in filter(None, message.strip().split("\n\n")): # Apply filters to normalize the paragraph 
clean_paragraph = reduce(text_reducer, self.filters.values(), paragraph) # remove any filtered (and now empty) paragraphs (ie. the git headers) if not clean_paragraph.strip(): continue # Check if the paragraph is the start of a new angular commit if not self.commit_prefix.search(clean_paragraph): if not separate_commit_msgs and not current_msg: # if there are no separate commit messages and no current message # then this is the first commit message current_msg = dedent(clean_paragraph) continue # append the paragraph as part of the previous commit message if current_msg: current_msg += f"\n\n{dedent(clean_paragraph)}" # else: drop the paragraph continue # Since we found the start of the new commit, store any previous commit # message separately and start the new commit message if current_msg: separate_commit_msgs.append(current_msg) current_msg = clean_paragraph return [*separate_commit_msgs, current_msg] python-semantic-release-9.21.0/src/semantic_release/commit_parser/conventional.py000066400000000000000000000016061475670435200303720ustar00rootroot00000000000000from __future__ import annotations from pydantic.dataclasses import dataclass from semantic_release.commit_parser.angular import ( AngularCommitParser, AngularParserOptions, ) @dataclass class ConventionalCommitParserOptions(AngularParserOptions): """Options dataclass for the ConventionalCommitParser.""" class ConventionalCommitParser(AngularCommitParser): """ A commit parser for projects conforming to the conventional commits specification. 
See https://www.conventionalcommits.org/en/v1.0.0/ """ # TODO: Deprecate in lieu of get_default_options() parser_options = ConventionalCommitParserOptions def __init__(self, options: ConventionalCommitParserOptions | None = None) -> None: super().__init__(options) @staticmethod def get_default_options() -> ConventionalCommitParserOptions: return ConventionalCommitParserOptions() python-semantic-release-9.21.0/src/semantic_release/commit_parser/emoji.py000066400000000000000000000434221475670435200270000ustar00rootroot00000000000000"""Commit parser which looks for emojis to determine the type of commit""" from __future__ import annotations import logging import re from functools import reduce from itertools import zip_longest from re import compile as regexp from textwrap import dedent from typing import Tuple from git.objects.commit import Commit from pydantic.dataclasses import dataclass from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.commit_parser.util import ( deep_copy_commit, force_str, parse_paragraphs, ) from semantic_release.enums import LevelBump from semantic_release.errors import InvalidParserOptions from semantic_release.helpers import sort_numerically, text_reducer logger = logging.getLogger(__name__) @dataclass class EmojiParserOptions(ParserOptions): """Options dataclass for EmojiCommitParser""" major_tags: Tuple[str, ...] = (":boom:",) """Commit-type prefixes that should result in a major release bump.""" minor_tags: Tuple[str, ...] = ( ":sparkles:", ":children_crossing:", ":lipstick:", ":iphone:", ":egg:", ":chart_with_upwards_trend:", ) """Commit-type prefixes that should result in a minor release bump.""" patch_tags: Tuple[str, ...] 
= ( ":ambulance:", ":lock:", ":bug:", ":zap:", ":goal_net:", ":alien:", ":wheelchair:", ":speech_balloon:", ":mag:", ":apple:", ":penguin:", ":checkered_flag:", ":robot:", ":green_apple:", ) """Commit-type prefixes that should result in a patch release bump.""" other_allowed_tags: Tuple[str, ...] = (":memo:", ":checkmark:") """Commit-type prefixes that are allowed but do not result in a version bump.""" allowed_tags: Tuple[str, ...] = ( *major_tags, *minor_tags, *patch_tags, *other_allowed_tags, ) """All commit-type prefixes that are allowed.""" default_bump_level: LevelBump = LevelBump.NO_RELEASE """The minimum bump level to apply to valid commit message.""" parse_linked_issues: bool = False """ Whether to parse linked issues from the commit message. Issue identification is not defined in the Gitmoji specification, so this parser will not attempt to parse issues by default. If enabled, the parser will use the same identification as GitHub, GitLab, and BitBucket use for linking issues, which is to look for a git commit message footer starting with "Closes:", "Fixes:", or "Resolves:" then a space, and then the issue identifier. The line prefix can be singular or plural and it is not case-sensitive but must have a colon and a whitespace separator. """ # TODO: breaking change v10, change default to True parse_squash_commits: bool = False """Toggle flag for whether or not to parse squash commits""" # TODO: breaking change v10, change default to True ignore_merge_commits: bool = False """Toggle flag for whether or not to ignore merge commits""" @property def tag_to_level(self) -> dict[str, LevelBump]: """A mapping of commit tags to the level bump they should result in.""" return self._tag_to_level def __post_init__(self) -> None: self._tag_to_level: dict[str, LevelBump] = { str(tag): level for tag, level in [ # we have to do a type ignore as zip_longest provides a type that is not specific enough # for our expected output. 
Due to the empty second array, we know the first is always longest # and that means no values in the first entry of the tuples will ever be a LevelBump. We # apply a str() to make mypy happy although it will never happen. *zip_longest(self.allowed_tags, (), fillvalue=self.default_bump_level), *zip_longest(self.patch_tags, (), fillvalue=LevelBump.PATCH), *zip_longest(self.minor_tags, (), fillvalue=LevelBump.MINOR), *zip_longest(self.major_tags, (), fillvalue=LevelBump.MAJOR), ] if "|" not in str(tag) } class EmojiCommitParser(CommitParser[ParseResult, EmojiParserOptions]): """ Parse a commit using an emoji in the subject line. When multiple emojis are encountered, the one with the highest bump level is used. If there are multiple emojis on the same level, the we use the one listed earliest in the configuration. If the message does not contain any known emojis, then the level to bump will be 0 and the type of change "Other". This parser never raises UnknownCommitMessageStyleError. Emojis are not removed from the description, and will appear alongside the commit subject in the changelog. 
""" # TODO: Deprecate in lieu of get_default_options() parser_options = EmojiParserOptions def __init__(self, options: EmojiParserOptions | None = None) -> None: super().__init__(options) # Reverse the list of tags to ensure that the highest level tags are matched first emojis_in_precedence_order = list(self.options.tag_to_level.keys())[::-1] try: highest_emoji_pattern = regexp( r"(?P%s)" % str.join("|", emojis_in_precedence_order) ) except re.error as err: raise InvalidParserOptions( str.join( "\n", [ f"Invalid options for {self.__class__.__name__}", "Unable to create regular expression from configured commit-types.", "Please check the configured commit-types and remove or escape any regular expression characters.", ], ) ) from err self.emoji_selector = regexp( str.join( "", [ f"^{highest_emoji_pattern.pattern}", r"(?:\((?P[^)]+)\))?:?", ], ) ) # GitHub & Gitea use (#123), GitLab uses (!123), and BitBucket uses (pull request #123) self.mr_selector = regexp( r"[\t ]+\((?:pull request )?(?P[#!]\d+)\)[\t ]*$" ) self.issue_selector = regexp( str.join( "", [ r"^(?:clos(?:e|es|ed|ing)|fix(?:es|ed|ing)?|resolv(?:e|es|ed|ing)|implement(?:s|ed|ing)?):", r"[\t ]+(?P.+)[\t ]*$", ], ), flags=re.MULTILINE | re.IGNORECASE, ) self.notice_selector = regexp(r"^NOTICE: (?P.+)$") self.filters = { "typo-extra-spaces": (regexp(r"(\S) +(\S)"), r"\1 \2"), "git-header-commit": ( regexp(r"^[\t ]*commit [0-9a-f]+$\n?", flags=re.MULTILINE), "", ), "git-header-author": ( regexp(r"^[\t ]*Author: .+$\n?", flags=re.MULTILINE), "", ), "git-header-date": ( regexp(r"^[\t ]*Date: .+$\n?", flags=re.MULTILINE), "", ), "git-squash-heading": ( regexp( r"^[\t ]*Squashed commit of the following:.*$\n?", flags=re.MULTILINE, ), "", ), "git-squash-commit-prefix": ( regexp( str.join( "", [ r"^(?:[\t ]*[*-][\t ]+|[\t ]+)?", # bullet points or indentation highest_emoji_pattern.pattern + r"(\W)", # prior to commit type ], ), flags=re.MULTILINE, ), # move commit type to the start of the line r"\1\2", ), } 
@staticmethod def get_default_options() -> EmojiParserOptions: return EmojiParserOptions() def commit_body_components_separator( self, accumulator: dict[str, list[str]], text: str ) -> dict[str, list[str]]: if (match := self.notice_selector.match(text)) and ( notice := match.group("notice") ): accumulator["notices"].append(notice) # TODO: breaking change v10, removes notice footers from descriptions # return accumulator elif self.options.parse_linked_issues and ( match := self.issue_selector.search(text) ): predicate = regexp(r",? and | *[,;/& ] *").sub( ",", match.group("issue_predicate") or "" ) # Almost all issue trackers use a number to reference an issue so # we use a simple regexp to validate the existence of a number which helps filter out # any non-issue references that don't fit our expected format has_number = regexp(r"\d+") new_issue_refs: set[str] = set( filter( lambda issue_str, validator=has_number: validator.search(issue_str), # type: ignore[arg-type] predicate.split(","), ) ) if new_issue_refs: accumulator["linked_issues"] = sort_numerically( set(accumulator["linked_issues"]).union(new_issue_refs) ) # TODO: breaking change v10, removes resolution footers from descriptions # return accumulator # Prevent appending duplicate descriptions if text not in accumulator["descriptions"]: accumulator["descriptions"].append(text) return accumulator def parse_message(self, message: str) -> ParsedMessageResult: subject = message.split("\n", maxsplit=1)[0] linked_merge_request = "" if mr_match := self.mr_selector.search(subject): linked_merge_request = mr_match.group("mr_number") # TODO: breaking change v10, removes PR number from subject/descriptions # expects changelog template to format the line accordingly # subject = self.mr_selector.sub("", subject).strip() # Search for emoji of the highest importance in the subject match = self.emoji_selector.search(subject) primary_emoji = match.group("type") if match else "Other" parsed_scope = (match.group("scope") if 
match else None) or "" level_bump = self.options.tag_to_level.get( primary_emoji, self.options.default_bump_level ) # All emojis will remain part of the returned description body_components: dict[str, list[str]] = reduce( self.commit_body_components_separator, parse_paragraphs(message), { "descriptions": [], "notices": [], "linked_issues": [], }, ) descriptions = tuple(body_components["descriptions"]) return ParsedMessageResult( bump=level_bump, type=primary_emoji, category=primary_emoji, scope=parsed_scope, # TODO: breaking change v10, removes breaking change footers from descriptions # descriptions=( # descriptions[:1] if level_bump is LevelBump.MAJOR else descriptions # ) descriptions=descriptions, breaking_descriptions=( descriptions[1:] if level_bump is LevelBump.MAJOR else () ), release_notices=tuple(body_components["notices"]), linked_issues=tuple(body_components["linked_issues"]), linked_merge_request=linked_merge_request, ) @staticmethod def is_merge_commit(commit: Commit) -> bool: return len(commit.parents) > 1 def parse_commit(self, commit: Commit) -> ParseResult: return ParsedCommit.from_parsed_message_result( commit, self.parse_message(force_str(commit.message)) ) def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: """ Parse a commit message If the commit message is a squashed merge commit, it will be split into multiple commits, each of which will be parsed separately. Single commits will be returned as a list of a single ParseResult. 
""" if self.options.ignore_merge_commits and self.is_merge_commit(commit): err_msg = "Ignoring merge commit: %s" % commit.hexsha[:8] logger.debug(err_msg) return ParseError(commit, err_msg) separate_commits: list[Commit] = ( self.unsquash_commit(commit) if self.options.parse_squash_commits else [commit] ) # Parse each commit individually if there were more than one parsed_commits: list[ParseResult] = list( map(self.parse_commit, separate_commits) ) def add_linked_merge_request( parsed_result: ParseResult, mr_number: str ) -> ParseResult: return ( parsed_result if not isinstance(parsed_result, ParsedCommit) else ParsedCommit( **{ **parsed_result._asdict(), "linked_merge_request": mr_number, } ) ) # TODO: improve this for other VCS systems other than GitHub & BitBucket # Github works as the first commit in a squash merge commit has the PR number # appended to the first line of the commit message lead_commit = next(iter(parsed_commits)) if isinstance(lead_commit, ParsedCommit) and lead_commit.linked_merge_request: # If the first commit has linked merge requests, assume all commits # are part of the same PR and add the linked merge requests to all # parsed commits parsed_commits = [ lead_commit, *map( lambda parsed_result, mr=lead_commit.linked_merge_request: ( # type: ignore[misc] add_linked_merge_request(parsed_result, mr) ), parsed_commits[1:], ), ] return parsed_commits def unsquash_commit(self, commit: Commit) -> list[Commit]: # GitHub EXAMPLE: # ✨(changelog): add autofit_text_width filter to template environment (#1062) # # This change adds an equivalent style formatter that can apply a text alignment # to a maximum width and also maintain an indent over paragraphs of text # # * 🌐 Support Japanese language # # * ✅(changelog-context): add test cases to check autofit_text_width filter use # # `git merge --squash` EXAMPLE: # Squashed commit of the following: # # commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb # Author: codejedi365 # Date: Sun Oct 13 12:05:23 2024 
-0000 # # ⚡️ (homepage): Lazyload home screen images # # # Return a list of artificial commits (each with a single commit message) return [ # create a artificial commit object (copy of original but with modified message) Commit( **{ **deep_copy_commit(commit), "message": commit_msg, } ) for commit_msg in self.unsquash_commit_message(force_str(commit.message)) ] or [commit] def unsquash_commit_message(self, message: str) -> list[str]: normalized_message = message.replace("\r", "").strip() # split by obvious separate commits (applies to manual git squash merges) obvious_squashed_commits = self.filters["git-header-commit"][0].split( normalized_message ) separate_commit_msgs: list[str] = reduce( lambda all_msgs, msgs: all_msgs + msgs, map(self._find_squashed_commits_in_str, obvious_squashed_commits), [], ) return list(filter(None, separate_commit_msgs)) def _find_squashed_commits_in_str(self, message: str) -> list[str]: separate_commit_msgs: list[str] = [] current_msg = "" for paragraph in filter(None, message.strip().split("\n\n")): # Apply filters to normalize the paragraph clean_paragraph = reduce(text_reducer, self.filters.values(), paragraph) # remove any filtered (and now empty) paragraphs (ie. 
the git headers) if not clean_paragraph.strip(): continue # Check if the paragraph is the start of a new angular commit if not self.emoji_selector.search(clean_paragraph): if not separate_commit_msgs and not current_msg: # if there are no separate commit messages and no current message # then this is the first commit message current_msg = dedent(clean_paragraph) continue # append the paragraph as part of the previous commit message if current_msg: current_msg += f"\n\n{dedent(clean_paragraph)}" # else: drop the paragraph continue # Since we found the start of the new commit, store any previous commit # message separately and start the new commit message if current_msg: separate_commit_msgs.append(current_msg) current_msg = clean_paragraph return [*separate_commit_msgs, current_msg] python-semantic-release-9.21.0/src/semantic_release/commit_parser/scipy.py000066400000000000000000000106561475670435200270270ustar00rootroot00000000000000""" Parses commit messages using `scipy tags `_ of the form:: (): The elements , and are optional. If no tag is present, the commit will be added to the changelog section "None" and no version increment will be performed. While is supported here it isn't actually part of the scipy style. If it is missing, parentheses around it are too. The commit should then be of the form:: : To communicate a breaking change add "BREAKING CHANGE" into the body at the beginning of a paragraph. Fill this paragraph with information how to migrate from the broken behavior to the new behavior. It will be added to the "Breaking" section of the changelog. Supported Tags:: ( API, DEP, ENH, REV, BUG, MAINT, BENCH, BLD, ) DEV, DOC, STY, TST, REL, FEAT, TEST Supported Changelog Sections:: breaking, feature, fix, Other, None .. 
_`scipy-style`: https://docs.scipy.org/doc/scipy/reference/dev/contributor/development_workflow.html#writing-the-commit-message """ from __future__ import annotations import logging from typing import TYPE_CHECKING, Tuple from pydantic.dataclasses import dataclass from semantic_release.commit_parser.angular import ( AngularCommitParser, AngularParserOptions, ) from semantic_release.commit_parser.token import ( ParsedMessageResult, ParseError, ) from semantic_release.enums import LevelBump if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit logger = logging.getLogger(__name__) def _logged_parse_error(commit: Commit, error: str) -> ParseError: logger.debug(error) return ParseError(commit, error=error) tag_to_section = { "API": "breaking", "BENCH": "none", "BLD": "fix", "BUG": "fix", "DEP": "breaking", "DEV": "none", "DOC": "documentation", "ENH": "feature", "MAINT": "fix", "REV": "other", "STY": "none", "TST": "none", "REL": "none", # strictly speaking not part of the standard "FEAT": "feature", "TEST": "none", } @dataclass class ScipyParserOptions(AngularParserOptions): """ Options dataclass for ScipyCommitParser Scipy-style commit messages follow the same format as Angular-style commit just with different tag names. """ major_tags: Tuple[str, ...] = ("API",) """Commit-type prefixes that should result in a major release bump.""" minor_tags: Tuple[str, ...] = ("DEP", "DEV", "ENH", "REV", "FEAT") """Commit-type prefixes that should result in a minor release bump.""" patch_tags: Tuple[str, ...] = ("BLD", "BUG", "MAINT") """Commit-type prefixes that should result in a patch release bump.""" allowed_tags: Tuple[str, ...] = ( *major_tags, *minor_tags, *patch_tags, "BENCH", "DOC", "STY", "TST", "REL", "TEST", ) """ All commit-type prefixes that are allowed. These are used to identify a valid commit message. If a commit message does not start with one of these prefixes, it will not be considered a valid commit message. 
""" # TODO: breaking v10, make consistent with AngularParserOptions default_level_bump: LevelBump = LevelBump.NO_RELEASE """The minimum bump level to apply to valid commit message.""" def __post_init__(self) -> None: # TODO: breaking v10, remove as the name is now consistent self.default_bump_level = self.default_level_bump super().__post_init__() for tag in self.major_tags: self._tag_to_level[tag] = LevelBump.MAJOR class ScipyCommitParser(AngularCommitParser): """Parser for scipy-style commit messages""" # TODO: Deprecate in lieu of get_default_options() parser_options = ScipyParserOptions def __init__(self, options: ScipyParserOptions | None = None) -> None: super().__init__(options) @staticmethod def get_default_options() -> ScipyParserOptions: return ScipyParserOptions() def parse_message(self, message: str) -> ParsedMessageResult | None: return ( None if not (pmsg_result := super().parse_message(message)) else ParsedMessageResult( **{ **pmsg_result._asdict(), "category": tag_to_section.get(pmsg_result.type, "None"), } ) ) python-semantic-release-9.21.0/src/semantic_release/commit_parser/tag.py000066400000000000000000000067321475670435200264530ustar00rootroot00000000000000"""Legacy commit parser from Python Semantic Release 1.0""" from __future__ import annotations import logging import re from git.objects.commit import Commit from pydantic.dataclasses import dataclass from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.token import ParsedCommit, ParseError, ParseResult from semantic_release.commit_parser.util import breaking_re, parse_paragraphs from semantic_release.enums import LevelBump logger = logging.getLogger(__name__) re_parser = re.compile(r"(?P[^\n]+)" + r"(:?\n\n(?P.+))?", re.DOTALL) @dataclass class TagParserOptions(ParserOptions): minor_tag: str = ":sparkles:" patch_tag: str = ":nut_and_bolt:" def _logged_parse_error(commit: Commit, error: str) -> ParseError: logger.debug(error) return 
ParseError(commit, error=error) class TagCommitParser(CommitParser[ParseResult, TagParserOptions]): """ Parse a commit message according to the 1.0 version of python-semantic-release. It expects a tag of some sort in the commit message and will use the rest of the first line as changelog content. """ # TODO: Deprecate in lieu of get_default_options() parser_options = TagParserOptions @staticmethod def get_default_options() -> TagParserOptions: return TagParserOptions() def parse(self, commit: Commit) -> ParseResult | list[ParseResult]: message = str(commit.message) # Attempt to parse the commit message with a regular expression parsed = re_parser.match(message) if not parsed: return _logged_parse_error( commit, error=f"Unable to parse the given commit message: {message!r}" ) subject = parsed.group("subject") # Check tags for minor or patch if self.options.minor_tag in message: level = "feature" level_bump = LevelBump.MINOR if subject: subject = subject.replace(self.options.minor_tag, "") elif self.options.patch_tag in message: level = "fix" level_bump = LevelBump.PATCH if subject: subject = subject.replace(self.options.patch_tag, "") else: # We did not find any tags in the commit message return _logged_parse_error( commit, error=f"Unable to parse the given commit message: {message!r}" ) if parsed.group("text"): descriptions = parse_paragraphs(parsed.group("text")) else: descriptions = [] descriptions.insert(0, subject.strip()) # Look for descriptions of breaking changes breaking_descriptions = [ match.group(1) for match in (breaking_re.match(p) for p in descriptions[1:]) if match ] if breaking_descriptions: level = "breaking" level_bump = LevelBump.MAJOR logger.debug( "commit %s upgraded to a %s level_bump due to included breaking descriptions", commit.hexsha[:8], level_bump, ) logger.debug( "commit %s introduces a %s level_bump", commit.hexsha[:8], level_bump ) return ParsedCommit( bump=level_bump, type=level, scope="", descriptions=descriptions, 
breaking_descriptions=breaking_descriptions, commit=commit, ) python-semantic-release-9.21.0/src/semantic_release/commit_parser/token.py000066400000000000000000000173401475670435200270150ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING, NamedTuple, NoReturn, TypeVar, Union from semantic_release.commit_parser.util import force_str from semantic_release.errors import CommitParseError if TYPE_CHECKING: # pragma: no cover from git.objects.commit import Commit from semantic_release.enums import LevelBump class ParsedMessageResult(NamedTuple): """ A read-only named tuple object representing the result from parsing a commit message. Essentially this is a data structure which holds the parsed information from a commit message without the actual commit object itself. Very helpful for unit testing. Most of the fields will replicate the fields of a :py:class:`ParsedCommit ` """ bump: LevelBump type: str category: str scope: str descriptions: tuple[str, ...] breaking_descriptions: tuple[str, ...] = () release_notices: tuple[str, ...] = () linked_issues: tuple[str, ...] = () linked_merge_request: str = "" include_in_changelog: bool = True class ParsedCommit(NamedTuple): """A read-only named tuple object representing the result of parsing a commit message.""" bump: LevelBump """A LevelBump enum value indicating what type of change this commit introduces.""" type: str """ The type of the commit as a string, per the commit message style. This is up to the parser to implement; for example, the EmojiCommitParser parser fills this field with the emoji representing the most significant change for the commit. """ scope: str """ The scope, as a string, parsed from the commit. Generally an optional field based on the commit message style which means it very likely can be an empty string. Commit styles which do not have a meaningful concept of "scope" usually fill this field with an empty string. 
""" descriptions: list[str] """ A list of paragraphs from the commit message. Paragraphs are generally delimited by a double-newline since git commit messages are sometimes manually wordwrapped with a single newline, but this is up to the parser to implement. """ breaking_descriptions: list[str] """ A list of paragraphs which are deemed to identify and describe breaking changes by the parser. An example would be a paragraph which begins with the text ``BREAKING CHANGE:`` in the commit message but the parser gennerally strips the prefix and includes the rest of the paragraph in this list. """ commit: Commit """The original commit object (a class defined by GitPython) that was parsed""" release_notices: tuple[str, ...] = () """ A tuple of release notices, which are additional information about the changes that affect the user. An example would be a paragraph which begins with the text ``NOTICE:`` in the commit message but the parser generally strips the prefix and includes the rest of the paragraph in this list. """ linked_issues: tuple[str, ...] = () """ A tuple of issue numbers as strings, if the commit is contains issue references. If there are no issue references, this should be an empty tuple. Although, we generally refer to them as "issue numbers", it generally should be a string to adhere to the prefixes used by the VCS (ex. ``#`` for GitHub, GitLab, etc.) or issue tracker (ex. JIRA uses ``AAA-###``). """ linked_merge_request: str = "" """ A pull request or merge request definition, if the commit is labeled with a pull/merge request number. This is a string value which includes any special character prefix used by the VCS (e.g. ``#`` for GitHub, ``!`` for GitLab). """ include_in_changelog: bool = True """ A boolean value indicating whether this commit should be included in the changelog. This enables parsers to flag commits which are not user-facing or are otherwise not relevant to the changelog to be filtered out by PSR's internal algorithms. 
""" @property def message(self) -> str: """ A string representation of the commit message. This is a pass through property for convience to access the ``message`` attribute of the ``commit`` object. If the message is of type ``bytes`` then it will be decoded to a ``UTF-8`` string. """ return force_str(self.commit.message).replace("\r", "") @property def hexsha(self) -> str: """ A hex representation of the hash value of the commit. This is a pass through property for convience to access the ``hexsha`` attribute of the ``commit``. """ return self.commit.hexsha @property def short_hash(self) -> str: """A short representation of the hash value (in hex) of the commit.""" return self.hexsha[:7] @property def linked_pull_request(self) -> str: """An alias to the linked_merge_request attribute.""" return self.linked_merge_request def is_merge_commit(self) -> bool: return bool(len(self.commit.parents) > 1) @staticmethod def from_parsed_message_result( commit: Commit, parsed_message_result: ParsedMessageResult ) -> ParsedCommit: """A convience method to create a ParsedCommit object from a ParsedMessageResult object and a Commit object.""" return ParsedCommit( bump=parsed_message_result.bump, # TODO: breaking v10, swap back to type rather than category type=parsed_message_result.category, scope=parsed_message_result.scope, descriptions=list(parsed_message_result.descriptions), breaking_descriptions=list(parsed_message_result.breaking_descriptions), commit=commit, release_notices=parsed_message_result.release_notices, linked_issues=parsed_message_result.linked_issues, linked_merge_request=parsed_message_result.linked_merge_request, include_in_changelog=parsed_message_result.include_in_changelog, ) class ParseError(NamedTuple): """A read-only named tuple object representing an error that occurred while parsing a commit message.""" commit: Commit """The original commit object (a class defined by GitPython) that was parsed""" error: str """A string with a description for why the 
commit parsing failed.""" @property def message(self) -> str: """ A string representation of the commit message. This is a pass through property for convience to access the ``message`` attribute of the ``commit`` object. If the message is of type ``bytes`` then it will be decoded to a ``UTF-8`` string. """ return force_str(self.commit.message).replace("\r", "") @property def hexsha(self) -> str: """ A hex representation of the hash value of the commit. This is a pass through property for convience to access the ``hexsha`` attribute of the ``commit``. """ return self.commit.hexsha @property def short_hash(self) -> str: """A short representation of the hash value (in hex) of the commit.""" return self.hexsha[:7] def is_merge_commit(self) -> bool: return bool(len(self.commit.parents) > 1) def raise_error(self) -> NoReturn: """A convience method to raise a CommitParseError with the error message.""" raise CommitParseError(self.error) _T = TypeVar("_T", bound=ParsedCommit) _E = TypeVar("_E", bound=ParseError) # For extensions, this type can be used to build an alias # for example CustomParseResult = ParseResultType[CustomParsedCommit, ParseError] ParseResultType = Union[_T, _E] ParseResult = ParseResultType[ParsedCommit, ParseError] python-semantic-release-9.21.0/src/semantic_release/commit_parser/util.py000066400000000000000000000075671475670435200266640ustar00rootroot00000000000000from __future__ import annotations from contextlib import suppress from copy import deepcopy from functools import reduce from re import MULTILINE, compile as regexp from typing import TYPE_CHECKING # TODO: remove in v10 from semantic_release.helpers import ( sort_numerically, # noqa: F401 # TODO: maintained for compatibility ) if TYPE_CHECKING: # pragma: no cover from re import Pattern from typing import Any, TypedDict from git import Commit class RegexReplaceDef(TypedDict): pattern: Pattern repl: str breaking_re = regexp(r"BREAKING[ -]CHANGE:\s?(.*)") un_word_wrap: RegexReplaceDef = { # 
Match a line ending where the next line is not indented, or a bullet "pattern": regexp(r"((? list[str]: r""" This will take a text block and return a list containing each paragraph with single line breaks collapsed into spaces. To handle Windows line endings, carriage returns '\r' are removed before separating into paragraphs. It will attempt to detect Git footers and they will not be condensed. :param text: The text string to be divided. :return: A list of condensed paragraphs, as strings. """ adjusted_text = reduce( lambda txt, adj: adj["pattern"].sub(adj["repl"], txt), [trim_line_endings, un_word_wrap_hyphen], text, ) # Repeat replacements until no more changes are made prev_iteration = "" while prev_iteration != adjusted_text: prev_iteration = adjusted_text adjusted_text = spread_out_git_footers["pattern"].sub( spread_out_git_footers["repl"], adjusted_text ) return list( filter( None, [ un_word_wrap["pattern"].sub(un_word_wrap["repl"], paragraph).strip() for paragraph in adjusted_text.strip().split("\n\n") ], ) ) def force_str(msg: str | bytes | bytearray | memoryview) -> str: # This shouldn't be a thing but typing is being weird around what # git.commit.message returns and the memoryview type won't go away message = msg.tobytes() if isinstance(msg, memoryview) else msg return ( message.decode("utf-8") if isinstance(message, (bytes, bytearray)) else str(message) ) def deep_copy_commit(commit: Commit) -> dict[str, Any]: keys = [ "repo", "binsha", "author", "authored_date", "committer", "committed_date", "message", "tree", "parents", "encoding", "gpgsig", "author_tz_offset", "committer_tz_offset", ] kwargs = {} for key in keys: with suppress(ValueError): if hasattr(commit, key) and (value := getattr(commit, key)) is not None: if key in ["parents", "repo", "tree"]: # These tend to have circular references so don't deepcopy them kwargs[key] = value continue kwargs[key] = deepcopy(value) return kwargs 
python-semantic-release-9.21.0/src/semantic_release/const.py000066400000000000000000000015351475670435200241560ustar00rootroot00000000000000from __future__ import annotations import os import re PYPI_WEB_DOMAIN = "pypi.org" # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string SEMVER_REGEX = re.compile( r""" (?P0|[1-9]\d*) \. (?P0|[1-9]\d*) \. (?P0|[1-9]\d*) (?:-(?P(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))? (?:\+(?P[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))? """, flags=re.VERBOSE, ) COMMIT_MESSAGE = "{version}\n\nAutomatically generated by python-semantic-release" DEFAULT_COMMIT_AUTHOR = "semantic-release " DEFAULT_VERSION = "0.0.0" DEFAULT_SHELL: str | None = None if os.name == "posix": DEFAULT_SHELL = os.getenv("SHELL", "/bin/sh") elif os.name == "nt": DEFAULT_SHELL = os.getenv("COMSPEC") python-semantic-release-9.21.0/src/semantic_release/data/000077500000000000000000000000001475670435200233635ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/000077500000000000000000000000001475670435200253615ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/000077500000000000000000000000001475670435200270125ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/000077500000000000000000000000001475670435200274125ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components/000077500000000000000000000000001475670435200316555ustar00rootroot00000000000000changelog_header.md.j2000066400000000000000000000002461475670435200356730ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components# CHANGELOG {% if ctx.changelog_mode == "update" %}{# # IMPORTANT: add insertion flag for next version update #}{{ insertion_flag ~ "\n" }}{% endif %} 
changelog_init.md.j2000066400000000000000000000015361475670435200354110ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components{# This changelog template initializes a full changelog for the project, it follows the following logic: 1. Header 2. Any Unreleased Details (uncommon) 3. all previous releases except the very first release 4. the first release #}{# # Header #}{% include "changelog_header.md.j2" -%}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.md.j2" -%}{# # Since this is initialization, we are generating all the previous # release notes per version. The very first release notes is specialized #}{% if releases | length > 0 %}{% for release in releases %}{{ "\n" }}{% if loop.last and ctx.mask_initial_release %}{%- include "first_release.md.j2" -%}{% else %}{%- include "versioned_changes.md.j2" -%}{% endif %}{{ "\n" }}{% endfor %}{% endif %} changelog_update.md.j2000066400000000000000000000050561475670435200357310ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components{# This Update changelog template uses the following logic: 1. Read previous changelog file (ex. project_root/CHANGELOG.md) 2. Split on insertion flag (ex. ) 3. Print top half of previous changelog 3. New Changes (unreleased commits & newly released) 4. 
Print bottom half of previous changelog Note: if a previous file was not found, it does not write anything at the bottom but render does NOT fail #}{% set prev_changelog_contents = prev_changelog_file | read_file | safe %}{% set changelog_parts = prev_changelog_contents.split(insertion_flag, maxsplit=1) %}{# #}{% if changelog_parts | length < 2 %}{# # insertion flag was not found, check if the file was empty or did not exist #}{% if prev_changelog_contents | length > 0 %}{# # File has content but no insertion flag, therefore, file will not be updated #}{{ changelog_parts[0] }}{% else %}{# # File was empty or did not exist, therefore, it will be created from scratch #}{% include "changelog_init.md.j2" %}{% endif %}{% else %}{# # Previous Changelog Header # - Depending if there is header content, then it will separate the insertion flag # with a newline from header content, otherwise it will just print the insertion flag #}{% set prev_changelog_top = changelog_parts[0] | trim %}{% if prev_changelog_top | length > 0 %}{{ "%s\n\n%s\n" | format(prev_changelog_top, insertion_flag | trim) }}{% else %}{{ "%s\n" | format(insertion_flag | trim) }}{% endif %}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.md.j2" -%}{# #}{% if releases | length > 0 %}{# # Latest Release Details #}{% set release = releases[0] %}{# #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # First Release detected #}{{ "\n" }}{%- include "first_release.md.j2" -%}{{ "\n" }}{# #}{% elif "# " ~ release.version.as_semver_tag() ~ " " not in changelog_parts[1] %}{# # The release version is not already in the changelog so we add it #}{{ "\n" }}{%- include "versioned_changes.md.j2" -%}{{ "\n" }}{# #}{% endif %}{% endif %}{# # Previous Changelog Footer # - skips printing footer if empty, which happens when the insertion_flag # was at the end of the file (ignoring whitespace) #}{% set previous_changelog_bottom = changelog_parts[1] | trim %}{% if previous_changelog_bottom | 
length > 0 %}{{ "\n%s\n" | format(previous_changelog_bottom) }}{% endif %}{% endif %} changes.md.j2000066400000000000000000000135151475670435200340470ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components{% from 'macros.md.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.md.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.md.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.md.j2' import format_breaking_changes_description, format_commit_summary_line %}{% from 'macros.md.j2' import format_release_notice %}{# EXAMPLE: ### Features - Add new feature ([#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### Bug Fixes - Fix bug ([#11](https://domain.com/namespace/repo/pull/11), [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
#}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% for type_, commits in commit_objects if type_ != "unknown" %}{# PREPROCESS COMMITS (order by description & format description line) #}{% set ns = namespace(commits=commits) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Update the first line with reference links and if commit description # has more than one line, add the rest of the lines # NOTE: This is specifically to make sure to not hide contents # of squash commits (until parse support is added) #}{% set description = "- %s" | format(format_commit_summary_line(commit)) %}{% if commit.descriptions | length > 1 %}{% set description = "%s\n\n%s" | format( description, commit.descriptions[1:] | join("\n\n") ) %}{% endif %}{% set description = description | autofit_text_width(max_line_width, hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (header & commits) #}{% if commit_descriptions | length > 0 %}{{ "\n" }}{{ "### %s\n" | format(type_ | title) }}{{ "\n" }}{{ "%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endif %}{% endfor %}{# # Determine if there are any breaking change commits by filtering the list by breaking descriptions # commit_objects is a list of tuples [("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out breaking change commits that have no breaking descriptions # 1. Re-map the list to only the list of commits under the breaking category from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. 
Filter the list of ParsedCommits to only those with a breaking description #}{% set breaking_commits = commit_objects | map(attribute="1.0") %}{% set breaking_commits = breaking_commits | rejectattr("error", "defined") | selectattr("breaking_descriptions.0") | list %}{# #}{% if breaking_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "- %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = brking_descriptions.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "### Breaking Changes\n" }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# # Determine if there are any commits with release notice information by filtering the list by release_notices # commit_objects is a list of tuples [("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out commits that have no release notices # 1. Re-map the list to only the list of commits from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. 
Filter the list of ParsedCommits to only those with a release notice #}{% set notice_commits = commit_objects | map(attribute="1.0") %}{% set notice_commits = notice_commits | rejectattr("error", "defined") | selectattr("release_notices.0") | list %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "- %s" | format( format_release_notice(commit).split("\n\n") | join("\n\n- ") ) %}{% set _ = release_notices.append( full_description | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "### Additional Release Information\n" }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %} first_release.md.j2000066400000000000000000000006451475670435200352660ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components{# EXAMPLE: ## vX.X.X (YYYY-MMM-DD) _This release is published under the MIT License._ # Release Notes Only - Initial Release #}{{ "## %s (%s)\n" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) }}{% if license_name is defined and license_name %}{{ "\n_This release is published under the %s License._\n" | format(license_name) }}{% endif %} - Initial Release macros.md.j2000066400000000000000000000200041475670435200337120ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components{# MACRO: format a inline link reference in Markdown #}{% macro format_link(link, label) %}{{ "[%s](%s)" | format(label, link) }}{% endmacro %} {# MACRO: commit message links or PR/MR links of commit #}{% macro commit_msg_links(commit) %}{% if commit.error is undefined %}{% set 
commit_hash_link = format_link( commit.hexsha | commit_hash_url, "`%s`" | format(commit.short_hash) ) %}{# #}{% set summary_line = commit.descriptions[0] | safe %}{% set summary_line = [ summary_line.split(" ", maxsplit=1)[0] | capitalize, summary_line.split(" ", maxsplit=1)[1] ] | join(" ") %}{# #}{% if commit.linked_merge_request != "" %}{# # Add PR references with a link to the PR #}{% set pr_num = commit.linked_merge_request %}{% set pr_link = format_link(pr_num | pull_request_url, pr_num) %}{# # TODO: breaking change v10, remove summary line replacers as PSR will do it for us #}{% set summary_line = summary_line | replace("(pull request", "(") | replace("(" ~ pr_num ~ ")", "") | trim %}{% set summary_line = "%s (%s, %s)" | format( summary_line, pr_link, commit_hash_link, ) %}{# # DEFAULT: No PR identifier found, so just append commit hash as url to the commit summary_line #}{% else %}{% set summary_line = "%s (%s)" | format(summary_line, commit_hash_link) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line }}{% endif %}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error #}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} {# MACRO: format the breaking changes description by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_breaking_changes_description(commit) %}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit.breaking_descriptions %}{% if 
paragraph | trim | length > 0 %}{# #}{% set paragraph_text = [ paragraph.split(" ", maxsplit=1)[0] | capitalize, paragraph.split(" ", maxsplit=1)[1] ] | join(" ") | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: format the release notice by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_release_notice(commit) %}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit.release_notices %}{% if paragraph | trim | length > 0 %}{# #}{% set paragraph_text = [ paragraph.split(" ", maxsplit=1)[0] | capitalize, paragraph.split(" ", maxsplit=1)[1] ] | join(" ") | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized summaries and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_descriptions(ns) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by the first line 
of the commit message #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute='descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then the first line of the commit message #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute='scope,descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized breaking changes and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_brk_descriptions(ns) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by the first line of the commit message #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute='breaking_descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then the first line of the commit message #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute='scope,breaking_descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized release notices and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - 
parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_release_notices(ns) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by the first line of the commit message #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute='release_notices.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then the first line of the commit message #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute='scope,release_notices.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} unreleased_changes.md.j2000066400000000000000000000002611475670435200362500ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components{% if unreleased_commits | length > 0 %}{{ "\n## Unreleased\n" }}{% set commit_objects = unreleased_commits %}{% include "changes.md.j2" -%}{{ "\n" }}{% endif %} versioned_changes.md.j2000066400000000000000000000007661475670435200361310ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.components{# EXAMPLE: ## vX.X.X (YYYY-MMM-DD) _This release is published under the MIT License._ # Release Notes Only {{ change_sections }} #}{{ "## %s (%s)\n" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) }}{% if license_name is defined and license_name %}{{ "\n_This release is published under the %s License._\n" | format(license_name) }}{% endif %}{# #}{% set commit_objects = release["elements"] | dictsort %}{% include 
"changes.md.j2" -%} python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/.release_notes.md.j2000066400000000000000000000047201475670435200331570ustar00rootroot00000000000000{# EXAMPLE: ## v1.0.0 (2020-01-01) _This release is published under the MIT License._ ### Features - Add new feature ([#10](https://domain.com/namespace/repo/pull/10), [`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) - **scope**: Add new feature ([`abcdef0`](https://domain.com/namespace/repo/commit/HASH)) ### Bug Fixes - Fix bug (#11, [`abcdef1`](https://domain.com/namespace/repo/commit/HASH)) ### Breaking Changes - With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. - **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. ### Additional Release Information - This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. - **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. 
--- **Detailed Changes**: [vX.X.X...vX.X.X](https://domain.com/namespace/repo/compare/vX.X.X...vX.X.X) #}{# # Set line width to 1000 to avoid wrapping as GitHub will handle it #}{% set max_line_width = max_line_width | default(1000) %}{% set hanging_indent = hanging_indent | default(2) %}{% set license_name = license_name | default("", True) %}{% set releases = context.history.released.values() | list %}{% set curr_release_index = releases.index(release) %}{# #}{% if mask_initial_release and curr_release_index == releases | length - 1 %}{# # On a first release, generate our special message #}{% include ".components/first_release.md.j2" %}{% else %}{# # Not the first release so generate notes normally #}{% include ".components/versioned_changes.md.j2" -%}{# #}{% set prev_release_index = curr_release_index + 1 %}{# #}{% if 'compare_url' is filter and prev_release_index < releases | length %}{% set prev_version_tag = releases[prev_release_index].version.as_tag() %}{% set new_version_tag = release.version.as_tag() %}{% set version_compare_url = prev_version_tag | compare_url(new_version_tag) %}{% set detailed_changes_link = '[{}...{}]({})'.format( prev_version_tag, new_version_tag, version_compare_url ) %}{{ "\n" }}{{ "---\n" }}{{ "\n" }}{{ "**Detailed Changes**: %s" | format(detailed_changes_link) }}{% endif %}{% endif %} python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/md/CHANGELOG.md.j2000066400000000000000000000013311475670435200315330ustar00rootroot00000000000000{# This changelog template controls which changelog creation occurs based on which mode is provided. 
Modes: - init: Initialize a full changelog from scratch - update: Insert new version details where the placeholder exists in the current changelog #}{% set insertion_flag = ctx.changelog_insertion_flag %}{% set unreleased_commits = ctx.history.unreleased | dictsort %}{% set releases = ctx.history.released.values() | list %}{# #}{% if ctx.changelog_mode == "init" %}{% include ".components/changelog_init.md.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% set prev_changelog_file = ctx.prev_changelog_file %}{% include ".components/changelog_update.md.j2" %}{# #}{% endif %} python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/000077500000000000000000000000001475670435200276225ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components/000077500000000000000000000000001475670435200320655ustar00rootroot00000000000000changelog_header.rst.j2000066400000000000000000000003101475670435200363030ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components.. _changelog: ========= CHANGELOG ========= {% if ctx.changelog_mode == "update" %}{# # IMPORTANT: add insertion flag for next version update #}{{ insertion_flag ~ "\n" }}{% endif %} changelog_init.rst.j2000066400000000000000000000015421475670435200360260ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components{# This changelog template initializes a full changelog for the project, it follows the following logic: 1. Header 2. Any Unreleased Details (uncommon) 3. all previous releases except the very first release 4. the first release #}{# # Header #}{% include "changelog_header.rst.j2" -%}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" -%}{# # Since this is initialization, we are generating all the previous # release notes per version. 
The very first release notes is specialized #}{% if releases | length > 0 %}{% for release in releases %}{{ "\n" }}{% if loop.last and ctx.mask_initial_release %}{%- include "first_release.rst.j2" -%}{% else %}{%- include "versioned_changes.rst.j2" -%}{% endif %}{{ "\n" }}{% endfor %}{% endif %} changelog_update.rst.j2000066400000000000000000000050541475670435200363470ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components{# This Update changelog template uses the following logic: 1. Read previous changelog file (ex. project_root/CHANGELOG.md) 2. Split on insertion flag (ex. ) 3. Print top half of previous changelog 3. New Changes (unreleased commits & newly released) 4. Print bottom half of previous changelog Note: if a previous file was not found, it does not write anything at the bottom but render does NOT fail #}{% set prev_changelog_contents = prev_changelog_file | read_file | safe %}{% set changelog_parts = prev_changelog_contents.split(insertion_flag, maxsplit=1) %}{# #}{% if changelog_parts | length < 2 %}{# # insertion flag was not found, check if the file was empty or did not exist #}{% if prev_changelog_contents | length > 0 %}{# # File has content but no insertion flag, therefore, file will not be updated #}{{ changelog_parts[0] }}{% else %}{# # File was empty or did not exist, therefore, it will be created from scratch #}{% include "changelog_init.rst.j2" %}{% endif %}{% else %}{# # Previous Changelog Header # - Depending if there is header content, then it will separate the insertion flag # with a newline from header content, otherwise it will just print the insertion flag #}{% set prev_changelog_top = changelog_parts[0] | trim %}{% if prev_changelog_top | length > 0 %}{{ "%s\n\n%s\n" | format(prev_changelog_top, insertion_flag | trim) }}{% else %}{{ "%s\n" | format(insertion_flag | trim) }}{% endif %}{# # Any Unreleased Details (uncommon) #}{% include "unreleased_changes.rst.j2" -%}{# 
#}{% if releases | length > 0 %}{# # Latest Release Details #}{% set release = releases[0] %}{# #}{% if releases | length == 1 and ctx.mask_initial_release %}{# # First Release detected #}{{ "\n" }}{%- include "first_release.rst.j2" -%}{{ "\n" }}{# #}{% elif release.version.as_semver_tag() ~ " (" not in changelog_parts[1] %}{# # The release version is not already in the changelog so we add it #}{{ "\n" }}{%- include "versioned_changes.rst.j2" -%}{{ "\n" }}{# #}{% endif %}{% endif %}{# # Previous Changelog Footer # - skips printing footer if empty, which happens when the insertion_flag # was at the end of the file (ignoring whitespace) #}{% set previous_changelog_bottom = changelog_parts[1] | trim %}{% if previous_changelog_bottom | length > 0 %}{{ "\n%s\n" | format(previous_changelog_bottom) }}{% endif %}{% endif %} changes.rst.j2000066400000000000000000000162701475670435200344700ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_brk_descriptions %}{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_descriptions %}{% from 'macros.rst.j2' import apply_alphabetical_ordering_by_release_notices %}{% from 'macros.rst.j2' import extract_pr_link_reference, format_breaking_changes_description %}{% from 'macros.rst.j2' import format_commit_summary_line, format_link_reference %}{% from 'macros.rst.j2' import format_release_notice, generate_heading_underline %}{# Features -------- * Add new feature (`#10`_, `8a7b8ec`_) * **scope**: Add another feature (`abcdef0`_) Bug Fixes --------- * Fix bug (`#11`_, `8a7b8ec`_) Breaking Changes ---------------- * With the change _____, the change causes ___ effect. Ultimately, this section it is a more detailed description of the breaking change. With an optional scope prefix like the commit messages above. 
* **scope**: this breaking change has a scope to identify the part of the code that this breaking change applies to for better context. Additional Release Information ------------------------------ * This is a release note that provides additional information about the release that is not a breaking change or a feature/bug fix. * **scope**: this release note has a scope to identify the part of the code that this release note applies to for better context. .. _#10: https://domain.com/namespace/repo/pull/10 .. _#11: https://domain.com/namespace/repo/pull/11 .. _8a7B8ec: https://domain.com/owner/repo/commit/8a7b8ec .. _abcdef0: https://domain.com/owner/repo/commit/abcdef0 #}{% set max_line_width = max_line_width | default(100) %}{% set hanging_indent = hanging_indent | default(2) %}{# #}{% set post_paragraph_links = [] %}{# #}{% for type_, commits in commit_objects if type_ != "unknown" %}{# # PREPARE SECTION HEADER #}{% set section_header = "%s" | format(type_ | title) %}{# # # PREPROCESS COMMITS #}{% set ns = namespace(commits=commits) %}{% set _ = apply_alphabetical_ordering_by_descriptions(ns) %}{# #}{% set commit_descriptions = [] %}{# #}{% for commit in ns.commits %}{# # Extract PR/MR reference if it exists and store it for later #}{% set pr_link_reference = extract_pr_link_reference(commit) | default("", true) %}{% if pr_link_reference != "" %}{% set _ = post_paragraph_links.append(pr_link_reference) %}{% endif %}{# # # Always generate a commit hash reference link and store it for later #}{% set commit_hash_link_reference = format_link_reference( commit.hexsha | commit_hash_url, commit.short_hash ) %}{% set _ = post_paragraph_links.append(commit_hash_link_reference) %}{# # Generate the commit summary line and format it for RST # Update the first line with reference links and if commit description # has more than one line, add the rest of the lines # NOTE: This is specifically to make sure to not hide contents # of squash commits (until parse support is added) 
#}{% set description = "* %s" | format(format_commit_summary_line(commit)) %}{% if commit.descriptions | length > 1 %}{% set description = "%s\n\n%s" | format( description, commit.descriptions[1:] | join("\n\n") | trim ) %}{% endif %}{% set description = description | convert_md_to_rst %}{% set description = description | autofit_text_width(max_line_width, hanging_indent) %}{% set _ = commit_descriptions.append(description) %}{% endfor %}{# # # PRINT SECTION (Header & Commits) #}{% if commit_descriptions | length > 0 %}{{ "\n" }}{{ section_header ~ "\n" }}{{ generate_heading_underline(section_header, '-') }}{{ "\n" }}{{ "\n%s\n" | format(commit_descriptions | unique | join("\n\n")) }}{% endif %}{% endfor %}{# # Determine if there are any breaking change commits by filtering the list by breaking descriptions # commit_objects is a list of tuples [("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out breaking change commits that have no breaking descriptions # 1. Re-map the list to only the list of commits under the breaking category from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. 
Filter the list of ParsedCommits to only those with a breaking description #}{% set breaking_commits = commit_objects | map(attribute="1.0") %}{% set breaking_commits = breaking_commits | rejectattr("error", "defined") | selectattr("breaking_descriptions.0") | list %}{# #}{% if breaking_commits | length > 0 %}{# # PREPROCESS COMMITS #}{% set brk_ns = namespace(commits=breaking_commits) %}{% set _ = apply_alphabetical_ordering_by_brk_descriptions(brk_ns) %}{# #}{% set brking_descriptions = [] %}{# #}{% for commit in brk_ns.commits %}{% set full_description = "* %s" | format( format_breaking_changes_description(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = brking_descriptions.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT BREAKING CHANGE DESCRIPTIONS (header & descriptions) #}{{ "\n" }}{{ "Breaking Changes\n" }}{{ '----------------\n' }}{{ "\n%s\n" | format(brking_descriptions | unique | join("\n\n")) }}{# #}{% endif %}{# # Determine if there are any commits with release notice information by filtering the list by release_notices # commit_objects is a list of tuples [("Features", [ParsedCommit(), ...]), ("Bug Fixes", [ParsedCommit(), ...])] # HOW: Filter out commits that have no release notices # 1. Re-map the list to only the list of commits from the list of tuples # 2. Peel off the outer list to get a list of ParsedCommit objects # 3. 
Filter the list of ParsedCommits to only those with a release notice #}{% set notice_commits = commit_objects | map(attribute="1.0") %}{% set notice_commits = notice_commits | rejectattr("error", "defined") | selectattr("release_notices.0") | list %}{# #}{% if notice_commits | length > 0 %}{# PREPROCESS COMMITS #}{% set notice_ns = namespace(commits=notice_commits) %}{% set _ = apply_alphabetical_ordering_by_release_notices(notice_ns) %}{# #}{% set release_notices = [] %}{# #}{% for commit in notice_ns.commits %}{% set full_description = "* %s" | format( format_release_notice(commit).split("\n\n") | join("\n\n* ") ) %}{% set _ = release_notices.append( full_description | convert_md_to_rst | autofit_text_width(max_line_width, hanging_indent) ) %}{% endfor %}{# # # PRINT RELEASE NOTICE INFORMATION (header & descriptions) #}{{ "\n" }}{{ "Additional Release Information\n" }}{{ "------------------------------\n" }}{{ "\n%s\n" | format(release_notices | unique | join("\n\n")) }}{# #}{% endif %}{# # # # PRINT POST PARAGRAPH LINKS #}{% if post_paragraph_links | length > 0 %}{# # Print out any PR/MR or Issue URL references that were found in the commit messages #}{{ "\n%s\n" | format(post_paragraph_links | unique | sort | join("\n")) }}{% endif %} first_release.rst.j2000066400000000000000000000007161475670435200357050ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components{% from "macros.rst.j2" import generate_heading_underline %}{# .. _changelog-vX.X.X: vX.X.X (YYYY-MMM-DD) ==================== * Initial Release #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %} {{- ".. 
_changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} * Initial Release macros.rst.j2000066400000000000000000000216101475670435200343360ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components{# MACRO: format a post-paragraph link reference in RST #}{% macro format_link_reference(link, label) %}{{ ".. _%s: %s" | format(label, link) }}{% endmacro %} {# MACRO: format commit summary line #}{% macro format_commit_summary_line(commit) %}{# # Check for Parsing Error #}{% if commit.error is undefined %}{# # # Add any message links to the commit summary line #}{% set summary_line = commit_msg_links(commit) %}{# #}{% if commit.scope %}{% set summary_line = "**%s**: %s" | format(commit.scope, summary_line) %}{% endif %}{# # # Return the modified summary_line #}{{ summary_line }}{# #}{% else %}{# # Return the first line of the commit if there was a Parsing Error #}{{ (commit.commit.message | string).split("\n", maxsplit=1)[0] }}{% endif %}{% endmacro %} {# MACRO: Create & return an non-inline RST link from a commit message - Returns empty string if no PR/MR identifier is found #}{% macro extract_pr_link_reference(commit) %}{% if commit.error is undefined %}{% set summary_line = commit.descriptions[0] %}{# #}{% if commit.linked_merge_request != "" %}{# # Create a PR/MR reference url #}{{ format_link_reference( commit.linked_merge_request | pull_request_url, commit.linked_merge_request, ) }}{% endif %}{% endif %}{% endmacro %} {# MACRO: formats a commit message for a non-inline RST link for a commit hash and/or PR/MR #}{% macro commit_msg_links(commit, hvcs_type) %}{% if commit.error is undefined %}{% set commit_hash_link = "`%s`_" | format(commit.short_hash) %}{# #}{% set summary_line = commit.descriptions[0] | safe %}{% set summary_line = [ summary_line.split(" ", maxsplit=1)[0] | capitalize, summary_line.split(" ", maxsplit=1)[1] ] 
| join(" ") %}{# #}{% if commit.linked_merge_request != "" %}{# # Add PR references with a link to the PR #}{% set pr_link = "`%s`_" | format(commit.linked_merge_request) %}{# # TODO: breaking change v10, remove summary line replacers as PSR will do it for us #}{% set summary_line = summary_line | replace("(pull request ", "(") | replace("(" ~ commit.linked_merge_request ~ ")", "") | trim %}{% set summary_line = "%s (%s, %s)" | format( summary_line, pr_link, commit_hash_link, ) %}{# # DEFAULT: No PR identifier found, so just append a commit hash as url to the commit summary_line #}{% else %}{% set summary_line = "%s (%s)" | format(summary_line, commit_hash_link) %}{% endif %}{# # Return the modified summary_line #}{{ summary_line }}{% endif %}{% endmacro %} {# MACRO: generate a heading underline that matches the exact length of the header #} {% macro generate_heading_underline(header, underline_char) %}{% set header_underline = [] %}{% for _ in header %}{{ header_underline.append(underline_char) | default("", true) }}{% endfor %}{# # Print out the header underline #}{{ header_underline | join }}{% endmacro %} {# MACRO: format the breaking changes description by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_breaking_changes_description(commit) %}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit.breaking_descriptions %}{% if paragraph | trim | length > 0 %}{# #}{% set paragraph_text = [ paragraph.split(" ", maxsplit=1)[0] | capitalize, paragraph.split(" ", maxsplit=1)[1] ] | join(" ") | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: 
format the release notice by: - Capitalizing the description - Adding an optional scope prefix #}{% macro format_release_notice(commit) %}{% set ns = namespace(full_description="") %}{# #}{% if commit.error is undefined %}{% for paragraph in commit.release_notices %}{% if paragraph | trim | length > 0 %}{# #}{% set paragraph_text = [ paragraph.split(" ", maxsplit=1)[0] | capitalize, paragraph.split(" ", maxsplit=1)[1] ] | join(" ") | trim | safe %}{# #}{% set ns.full_description = [ ns.full_description, paragraph_text ] | join("\n\n") %}{# #}{% endif %}{% endfor %}{# #}{% set ns.full_description = ns.full_description | trim %}{# #}{% if commit.scope %}{% set ns.full_description = "**%s**: %s" | format( commit.scope, ns.full_description ) %}{% endif %}{% endif %}{# #}{{ ns.full_description }}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized summaries and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_descriptions(ns) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by the first line of the commit message #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute='descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then the first line of the commit message #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute='scope,descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% 
endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized breaking changes and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_brk_descriptions(ns) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by the first line of the commit message #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute='breaking_descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then the first line of the commit message #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute='scope,breaking_descriptions.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} {# MACRO: apply smart ordering of commits objects based on alphabetized release notices and then scopes - Commits are sorted based on the commit type and the commit message - Commits are grouped by the commit type - parameter: ns (namespace) object with a commits list - returns None but modifies the ns.commits list in place #}{% macro apply_alphabetical_ordering_by_release_notices(ns) %}{% set ordered_commits = [] %}{# # # Eliminate any ParseError commits from input set #}{% set filtered_commits = ns.commits | rejectattr("error", "defined") | list %}{# # # grab all commits with no scope and sort alphabetically by the first line of the commit message #}{% for commit in filtered_commits | rejectattr("scope") | sort(attribute='release_notices.0') %}{{ 
ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # grab all commits with a scope and sort alphabetically by the scope and then the first line of the commit message #}{% for commit in filtered_commits | selectattr("scope") | sort(attribute='scope,release_notices.0') %}{{ ordered_commits.append(commit) | default("", true) }}{% endfor %}{# # # Return the ordered commits #}{% set ns.commits = ordered_commits %}{% endmacro %} unreleased_changes.rst.j2000066400000000000000000000003121475670435200366650ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components{% if unreleased_commits | length > 0 %} .. _changelog-unreleased: Unreleased ========== {% set commit_objects = unreleased_commits %}{% include "changes.rst.j2" -%}{{ "\n" }}{% endif %} versioned_changes.rst.j2000066400000000000000000000010511475670435200365350ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/.components{% from 'macros.rst.j2' import generate_heading_underline %}{# .. _changelog-X.X.X: vX.X.X (YYYY-MMM-DD) ==================== {{ change_sections }} #}{% set version_header = "%s (%s)" | format( release.version.as_semver_tag(), release.tagged_date.strftime("%Y-%m-%d") ) %}{# #}{{ ".. _changelog-%s:" | format(release.version.as_semver_tag()) }} {{ version_header }} {{ generate_heading_underline(version_header, "=") }} {# #}{% set commit_objects = release["elements"] | dictsort %}{% include "changes.rst.j2" -%} python-semantic-release-9.21.0/src/semantic_release/data/templates/angular/rst/CHANGELOG.rst.j2000066400000000000000000000013331475670435200321550ustar00rootroot00000000000000{# This changelog template controls which changelog creation occurs based on which mode is provided. 
Modes: - init: Initialize a full changelog from scratch - update: Insert new version details where the placeholder exists in the current changelog #}{% set insertion_flag = ctx.changelog_insertion_flag %}{% set unreleased_commits = ctx.history.unreleased | dictsort %}{% set releases = ctx.history.released.values() | list %}{# #}{% if ctx.changelog_mode == "init" %}{% include ".components/changelog_init.rst.j2" %}{# #}{% elif ctx.changelog_mode == "update" %}{% set prev_changelog_file = ctx.prev_changelog_file %}{% include ".components/changelog_update.rst.j2" %}{# #}{% endif %} python-semantic-release-9.21.0/src/semantic_release/enums.py000066400000000000000000000033201475670435200241510ustar00rootroot00000000000000from __future__ import annotations import logging from enum import IntEnum, unique @unique class LevelBump(IntEnum): """ IntEnum representing valid types of bumps for a version. We use an IntEnum to enable ordering of levels. """ NO_RELEASE = 0 PRERELEASE_REVISION = 1 PATCH = 2 MINOR = 3 MAJOR = 4 def __str__(self) -> str: """ Return the level name rather than 'LevelBump.' E.g. >>> str(LevelBump.NO_RELEASE) 'no_release' >>> str(LevelBump.MAJOR) 'major' """ return self.name.lower() @classmethod def from_string(cls, val: str) -> LevelBump: """ Get the level from string representation. For backwards-compatibility, dashes are replaced with underscores so that: >>> LevelBump.from_string("no-release") == LevelBump.NO_RELEASE Equally, >>> LevelBump.from_string("minor") == LevelBump.MINOR """ return cls[val.upper().replace("-", "_")] class SemanticReleaseLogLevels(IntEnum): """IntEnum representing the log levels used by semantic-release.""" FATAL = logging.FATAL CRITICAL = logging.CRITICAL ERROR = logging.ERROR WARNING = logging.WARNING INFO = logging.INFO DEBUG = logging.DEBUG SILLY = 5 def __str__(self) -> str: """ Return the level name rather than 'SemanticReleaseLogLevels.' E.g. 
>>> str(SemanticReleaseLogLevels.DEBUG) 'DEBUG' >>> str(SemanticReleaseLogLevels.CRITICAL) 'CRITICAL' """ return self.name.upper() logging.addLevelName( SemanticReleaseLogLevels.SILLY, str(SemanticReleaseLogLevels.SILLY), ) python-semantic-release-9.21.0/src/semantic_release/errors.py000066400000000000000000000055301475670435200243430ustar00rootroot00000000000000"""Custom Errors""" class SemanticReleaseBaseError(Exception): """ Base Exception from which all other custom Exceptions defined in semantic_release inherit """ class InternalError(SemanticReleaseBaseError): """Raised when an internal error occurs, which should never happen""" class InvalidConfiguration(SemanticReleaseBaseError): """Raised when configuration is deemed invalid""" class InvalidParserOptions(InvalidConfiguration): """Raised when the parser options are invalid""" class MissingGitRemote(SemanticReleaseBaseError): """Raised when repository is missing the configured remote origin or upstream""" class InvalidVersion(ValueError, SemanticReleaseBaseError): """ Raised when Version.parse attempts to parse a string containing an invalid version. """ class NotAReleaseBranch(InvalidConfiguration): """ Raised when semantic_release is invoked on a branch which isn't configured for releases """ class DetachedHeadGitError(SemanticReleaseBaseError): """Raised when the git repository is in a detached HEAD state""" class CommitParseError(SemanticReleaseBaseError): """ Raised when a commit cannot be parsed by a commit parser. Custom commit parsers should also raise this Exception """ class MissingMergeBaseError(SemanticReleaseBaseError): """ Raised when the merge base cannot be found with the current history. Generally because of a shallow git clone. """ class UnexpectedResponse(SemanticReleaseBaseError): """ Raised when an HTTP response cannot be parsed properly or the expected structure is not found. 
""" class IncompleteReleaseError(SemanticReleaseBaseError): """ Raised when there is a failure amongst one of the api requests when creating a release on a remote hvcs. """ class AssetUploadError(SemanticReleaseBaseError): """ Raised when there is a failure uploading an asset to a remote hvcs's release artifact storage. """ class ParserLoadError(SemanticReleaseBaseError): """ Raised when there is a failure to find, load, or instantiate a custom parser definition. """ class BuildDistributionsError(SemanticReleaseBaseError): """Raised when there is a failure to build the distribution files.""" class GitAddError(SemanticReleaseBaseError): """Raised when there is a failure to add files to the git index.""" class GitCommitError(SemanticReleaseBaseError): """Raised when there is a failure to commit the changes.""" class GitCommitEmptyIndexError(SemanticReleaseBaseError): """Raised when there is an attempt to commit an empty index.""" class GitTagError(SemanticReleaseBaseError): """Raised when there is a failure to tag the release.""" class GitPushError(SemanticReleaseBaseError): """Raised when there is a failure to push to the git remote.""" python-semantic-release-9.21.0/src/semantic_release/gitproject.py000066400000000000000000000223161475670435200252020ustar00rootroot00000000000000"""Module for git related operations.""" from __future__ import annotations from contextlib import nullcontext from datetime import datetime from logging import getLogger from pathlib import Path from typing import TYPE_CHECKING from git import GitCommandError, Repo from semantic_release.cli.masking_filter import MaskingFilter from semantic_release.cli.util import indented, noop_report from semantic_release.errors import ( GitAddError, GitCommitEmptyIndexError, GitCommitError, GitPushError, GitTagError, ) if TYPE_CHECKING: # pragma: no cover from contextlib import _GeneratorContextManager from logging import Logger from typing import Sequence from git import Actor class GitProject: def 
__init__( self, directory: Path | str = ".", commit_author: Actor | None = None, credential_masker: MaskingFilter | None = None, ) -> None: self._project_root = Path(directory).resolve() self._logger = getLogger(__name__) self._cred_masker = credential_masker or MaskingFilter() self._commit_author = commit_author @property def project_root(self) -> Path: return self._project_root @property def logger(self) -> Logger: return self._logger def _get_custom_environment( self, repo: Repo, custom_vars: dict[str, str] | None = None, ) -> nullcontext[None] | _GeneratorContextManager[None]: """ git.custom_environment is a context manager but is not reentrant, so once we have "used" it we need to throw it away and re-create it in order to use it again """ author_vars = ( { "GIT_AUTHOR_NAME": self._commit_author.name, "GIT_AUTHOR_EMAIL": self._commit_author.email, "GIT_COMMITTER_NAME": self._commit_author.name, "GIT_COMMITTER_EMAIL": self._commit_author.email, } if self._commit_author else {} ) custom_env_vars = { **author_vars, **(custom_vars or {}), } return ( nullcontext() if not custom_env_vars else repo.git.custom_environment(**custom_env_vars) ) def is_dirty(self) -> bool: with Repo(str(self.project_root)) as repo: return repo.is_dirty() def git_add( self, paths: Sequence[Path | str], force: bool = False, strict: bool = False, noop: bool = False, ) -> None: if noop: noop_report( indented( f"""\ would have run: git add {str.join(" ", [str(Path(p)) for p in paths])} """ ) ) return git_args = dict( filter( lambda k_v: k_v[1], # if truthy { "force": force, }.items(), ) ) with Repo(str(self.project_root)) as repo: # TODO: in future this loop should be 1 line: # repo.index.add(all_paths_to_add, force=False) # noqa: ERA001 # but since 'force' is deliberately ineffective (as in docstring) in gitpython 3.1.18 # we have to do manually add each filepath, and catch the exception if it is an ignored file for updated_path in paths: try: repo.git.add(str(Path(updated_path)), 
**git_args) except GitCommandError as err: # noqa: PERF203, acceptable performance loss err_msg = f"Failed to add path ({updated_path}) to index" if strict: self.logger.exception(str(err)) raise GitAddError(err_msg) from err self.logger.warning(err_msg) def git_commit( self, message: str, date: int | None = None, commit_all: bool = False, no_verify: bool = False, noop: bool = False, ) -> None: git_args = dict( filter( lambda k_v: k_v[1], # if truthy { "a": commit_all, "m": message, "date": date, "no_verify": no_verify, }.items(), ) ) if noop: command = ( f"""\ GIT_AUTHOR_NAME={self._commit_author.name} \\ GIT_AUTHOR_EMAIL={self._commit_author.email} \\ GIT_COMMITTER_NAME={self._commit_author.name} \\ GIT_COMMITTER_EMAIL={self._commit_author.email} \\ """ if self._commit_author else "" ) # Indents the newlines so that terminal formatting is happy - note the # git commit line of the output is 24 spaces indented too # Only this message needs such special handling because of the newlines # that might be in a commit message between the subject and body indented_commit_message = message.replace("\n\n", "\n\n" + " " * 24) command += f"git commit -m '{indented_commit_message}'" command += "--all" if commit_all else "" command += "--no-verify" if no_verify else "" noop_report( indented( f"""\ would have run: {command} """ ) ) return with Repo(str(self.project_root)) as repo: has_index_changes = bool(repo.index.diff("HEAD")) has_working_changes = self.is_dirty() will_commit_files = has_index_changes or ( has_working_changes and commit_all ) if not will_commit_files: raise GitCommitEmptyIndexError("No changes to commit!") with self._get_custom_environment(repo): try: repo.git.commit(**git_args) except GitCommandError as err: self.logger.exception(str(err)) raise GitCommitError("Failed to commit changes") from err def git_tag( self, tag_name: str, message: str, isotimestamp: str, noop: bool = False ) -> None: try: datetime.fromisoformat(isotimestamp) except ValueError as err: 
raise ValueError("Invalid timestamp format") from err if noop: command = str.join( " ", [ f"GIT_COMMITTER_DATE={isotimestamp}", *( [ f"GIT_AUTHOR_NAME={self._commit_author.name}", f"GIT_AUTHOR_EMAIL={self._commit_author.email}", f"GIT_COMMITTER_NAME={self._commit_author.name}", f"GIT_COMMITTER_EMAIL={self._commit_author.email}", ] if self._commit_author else [""] ), f"git tag -a {tag_name} -m '{message}'", ], ) noop_report( indented( f"""\ would have run: {command} """ ) ) return with Repo(str(self.project_root)) as repo, self._get_custom_environment( repo, {"GIT_COMMITTER_DATE": isotimestamp}, ): try: repo.git.tag("-a", tag_name, m=message) except GitCommandError as err: self.logger.exception(str(err)) raise GitTagError(f"Failed to create tag ({tag_name})") from err def git_push_branch(self, remote_url: str, branch: str, noop: bool = False) -> None: if noop: noop_report( indented( f"""\ would have run: git push {self._cred_masker.mask(remote_url)} {branch} """ ) ) return with Repo(str(self.project_root)) as repo: try: repo.git.push(remote_url, branch) except GitCommandError as err: self.logger.exception(str(err)) raise GitPushError( f"Failed to push branch ({branch}) to remote" ) from err def git_push_tag(self, remote_url: str, tag: str, noop: bool = False) -> None: if noop: noop_report( indented( f"""\ would have run: git push {self._cred_masker.mask(remote_url)} tag {tag} """ # noqa: E501 ) ) return with Repo(str(self.project_root)) as repo: try: repo.git.push(remote_url, "tag", tag) except GitCommandError as err: self.logger.exception(str(err)) raise GitPushError(f"Failed to push tag ({tag}) to remote") from err python-semantic-release-9.21.0/src/semantic_release/globals.py000066400000000000000000000004001475670435200244410ustar00rootroot00000000000000"""Semantic Release Global Variables.""" from __future__ import annotations from semantic_release.enums import SemanticReleaseLogLevels log_level: SemanticReleaseLogLevels = SemanticReleaseLogLevels.WARNING 
"""int: Logging level for semantic-release""" python-semantic-release-9.21.0/src/semantic_release/helpers.py000066400000000000000000000233731475670435200244760ustar00rootroot00000000000000from __future__ import annotations import importlib.util import logging import os import re import string import sys from functools import lru_cache, reduce, wraps from pathlib import Path, PurePosixPath from re import IGNORECASE, compile as regexp from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Sequence, TypeVar from urllib.parse import urlsplit if TYPE_CHECKING: # pragma: no cover from re import Pattern from typing import Iterable log = logging.getLogger(__name__) number_pattern = regexp(r"(?P\S*?)(?P\d[\d,]*)\b") hex_number_pattern = regexp( r"(?P\S*?)(?:0x)?(?P[0-9a-f]+)\b", IGNORECASE ) def get_number_from_str( string: str, default: int = -1, interpret_hex: bool = False ) -> int: if interpret_hex and (match := hex_number_pattern.search(string)): return abs(int(match.group("number"), 16)) if match := number_pattern.search(string): return int(match.group("number")) return default def sort_numerically( iterable: Iterable[str], reverse: bool = False, allow_hex: bool = False ) -> list[str]: # Alphabetically sort prefixes first, then sort by number alphabetized_list = sorted(iterable) # Extract prefixes in order to group items by prefix unmatched_items = [] prefixes: dict[str, list[str]] = {} for item in alphabetized_list: if not ( pattern_match := ( (hex_number_pattern.search(item) if allow_hex else None) or number_pattern.search(item) ) ): unmatched_items.append(item) continue prefix = prefix if (prefix := pattern_match.group("prefix")) else "" if prefix not in prefixes: prefixes[prefix] = [] prefixes[prefix].append(item) # Sort prefixes and items by number mixing in unmatched items as alphabetized with other prefixes return reduce( lambda acc, next_item: acc + next_item, [ ( sorted( prefixes[prefix], key=lambda x: get_number_from_str( x, default=-1, 
interpret_hex=allow_hex ), reverse=reverse, ) if prefix in prefixes else [prefix] ) for prefix in sorted([*prefixes.keys(), *unmatched_items]) ], [], ) def text_reducer(text: str, filter_pair: tuple[Pattern[str], str]) -> str: """Reduce function to apply mulitple filters to a string""" if not text: # abort if the paragraph is empty return text filter_pattern, replacement = filter_pair return filter_pattern.sub(replacement, text) def validate_types_in_sequence( sequence: Sequence, types: type | tuple[type, ...] ) -> bool: """Validate that all elements in a sequence are of a specific type""" return all(isinstance(item, types) for item in sequence) def format_arg(value: Any) -> str: """Helper to format an argument an argument for logging""" if type(value) == str: return f"'{value.strip()}'" return str(value) def check_tag_format(tag_format: str) -> None: if "version" not in (f[1] for f in string.Formatter().parse(tag_format)): raise ValueError( f"Invalid tag_format {tag_format!r}, must use 'version' as a format key" ) _R = TypeVar("_R") _FuncType = Callable[..., _R] def logged_function(logger: logging.Logger) -> Callable[[_FuncType[_R]], _FuncType[_R]]: """ Decorator which adds debug logging of a function's input arguments and return value. The input arguments are logged before the function is called, and the return value is logged once it has completed. :param logger: Logger to send output to. 
""" def _logged_function(func: _FuncType[_R]) -> _FuncType[_R]: @wraps(func) def _wrapper(*args: Any, **kwargs: Any) -> _R: logger.debug( "%s(%s, %s)", func.__name__, ", ".join([format_arg(x) for x in args]), ", ".join([f"{k}={format_arg(v)}" for k, v in kwargs.items()]), ) # Call function result = func(*args, **kwargs) # Log result logger.debug("%s -> %s", func.__qualname__, str(result)) return result return _wrapper return _logged_function @logged_function(log) def dynamic_import(import_path: str) -> Any: """ Dynamically import an object from a conventionally formatted "module:attribute" string """ if ":" not in import_path: raise ValueError( f"Invalid import path {import_path!r}, must use 'module:Class' format" ) # Split the import path into module and attribute module_name, attr = import_path.split(":", maxsplit=1) # Check if the module is a file path, if it can be resolved and exists on disk then import as a file module_filepath = Path(module_name).resolve() if module_filepath.exists(): module_path = ( module_filepath.stem if Path(module_name).is_absolute() else str(Path(module_name).with_suffix("")).replace(os.sep, ".").lstrip(".") ) if module_path not in sys.modules: log.debug("Loading '%s' from file '%s'", module_path, module_filepath) spec = importlib.util.spec_from_file_location( module_path, str(module_filepath) ) if spec is None: raise ImportError(f"Could not import {module_filepath}") module = importlib.util.module_from_spec(spec) # type: ignore[arg-type] sys.modules.update({spec.name: module}) spec.loader.exec_module(module) # type: ignore[union-attr] return getattr(sys.modules[module_path], attr) # Otherwise, import as a module try: log.debug("Importing module '%s'", module_name) module = importlib.import_module(module_name) log.debug("Loading '%s' from module '%s'", attr, module_name) return getattr(module, attr) except TypeError as err: raise ImportError( str.join( "\n", [ str(err.args[0]), "Verify the import format matches 'module:attribute' or 
'path/to/module:attribute'", ], ) ) from err class ParsedGitUrl(NamedTuple): """Container for the elements parsed from a git URL""" scheme: str netloc: str namespace: str repo_name: str @lru_cache(maxsize=512) def parse_git_url(url: str) -> ParsedGitUrl: """ Attempt to parse a string as a git url http[s]://, git://, file://, or ssh format, into a ParsedGitUrl. supported examples: http://git.mycompany.com/username/myproject.git https://github.com/username/myproject.git https://gitlab.com/group/subgroup/myproject.git https://git.mycompany.com:4443/username/myproject.git git://host.xz/path/to/repo.git/ git://host.xz:9418/path/to/repo.git/ git@github.com:username/myproject.git <-- assumes ssh:// ssh://git@github.com:3759/myproject.git <-- non-standard, but assume user 3759 ssh://git@github.com:username/myproject.git ssh://git@bitbucket.org:7999/username/myproject.git git+ssh://git@github.com:username/myproject.git /Users/username/dev/remote/myproject.git <-- Posix File paths file:///Users/username/dev/remote/myproject.git C:/Users/username/dev/remote/myproject.git <-- Windows File paths file:///C:/Users/username/dev/remote/myproject.git REFERENCE: https://stackoverflow.com/questions/31801271/what-are-the-supported-git-url-formats Raises ValueError if the url can't be parsed. 
""" log.debug("Parsing git url %r", url) # Normalizers are a list of tuples of (pattern, replacement) normalizers = [ # normalize implicit ssh urls to explicit ssh:// (r"^([\w._-]+@)", r"ssh://\1"), # normalize git+ssh:// urls to ssh:// (r"^git\+ssh://", "ssh://"), # normalize an scp like syntax to URL compatible syntax # excluding port definitions (:#####) & including numeric usernames (r"(ssh://(?:[\w._-]+@)?[\w.-]+):(?!\d{1,5}/\w+/)(.*)$", r"\1/\2"), # normalize implicit file (windows || posix) urls to explicit file:// urls (r"^([C-Z]:/)|^/(\w)", r"file:///\1\2"), ] for pattern, replacement in normalizers: url = re.compile(pattern).sub(replacement, url) # run the url through urlsplit to separate out the parts urllib_split = urlsplit(url) # Fail if url scheme not found if not urllib_split.scheme: raise ValueError(f"Cannot parse {url!r}") # We have been able to parse the url with urlsplit, # so it's a (file|git|ssh|https?)://... structure # but we aren't validating the protocol scheme as its not our business # use PosixPath to normalize the path & then separate out the namespace & repo_name namespace, _, name = ( str(PurePosixPath(urllib_split.path)).lstrip("/").rpartition("/") ) # strip out the .git at the end of the repo_name if present name = name[:-4] if name.endswith(".git") else name # check that we have all the required parts of the url required_parts = [ urllib_split.scheme, # Allow empty net location for file:// urls True if urllib_split.scheme == "file" else urllib_split.netloc, namespace, name, ] if not all(required_parts): raise ValueError(f"Bad url: {url!r}") return ParsedGitUrl( scheme=urllib_split.scheme, netloc=urllib_split.netloc, namespace=namespace, repo_name=name, ) 
python-semantic-release-9.21.0/src/semantic_release/hvcs/000077500000000000000000000000001475670435200234155ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/hvcs/__init__.py000066400000000000000000000007561475670435200255360ustar00rootroot00000000000000from semantic_release.hvcs._base import HvcsBase from semantic_release.hvcs.bitbucket import Bitbucket from semantic_release.hvcs.gitea import Gitea from semantic_release.hvcs.github import Github from semantic_release.hvcs.gitlab import Gitlab from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.token_auth import TokenAuth __all__ = [ "Bitbucket", "Gitea", "Github", "Gitlab", "HvcsBase", "RemoteHvcsBase", "TokenAuth", ] python-semantic-release-9.21.0/src/semantic_release/hvcs/_base.py000066400000000000000000000051501475670435200250410ustar00rootroot00000000000000"""Common functionality and interface for interacting with Git remote VCS""" from __future__ import annotations import logging import warnings from abc import ABCMeta, abstractmethod from functools import lru_cache from typing import TYPE_CHECKING from semantic_release.helpers import parse_git_url if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable # Globals logger = logging.getLogger(__name__) class HvcsBase(metaclass=ABCMeta): """ Interface for subclasses interacting with a remote vcs environment Methods generally have a base implementation are implemented here but likely just provide a not-supported message but return gracefully This class cannot be instantated directly but must be inherited from and implement the designated abstract methods. 
""" def __init__(self, remote_url: str, *args: Any, **kwargs: Any) -> None: self._remote_url = remote_url if parse_git_url(remote_url) else "" self._name: str | None = None self._owner: str | None = None def _not_supported(self: HvcsBase, method_name: str) -> None: warnings.warn( f"{method_name} is not supported by {type(self).__qualname__}", stacklevel=2, ) @lru_cache(maxsize=1) def _get_repository_owner_and_name(self) -> tuple[str, str]: """ Parse the repository's remote url to identify the repository owner and name """ parsed_git_url = parse_git_url(self._remote_url) return parsed_git_url.namespace, parsed_git_url.repo_name @property def repo_name(self) -> str: if self._name is None: _, name = self._get_repository_owner_and_name() self._name = name return self._name @property def owner(self) -> str: if self._owner is None: _owner, _ = self._get_repository_owner_and_name() self._owner = _owner return self._owner @abstractmethod def remote_url(self, use_token: bool) -> str: """ Return the remote URL for the repository, including the token for authentication if requested by setting the `use_token` parameter to True, """ self._not_supported(self.remote_url.__name__) return "" @abstractmethod def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: """ Return a list of functions that can be used as filters in a Jinja2 template ex. filters to convert text to URLs for issues and commits """ self._not_supported(self.get_changelog_context_filters.__name__) return () python-semantic-release-9.21.0/src/semantic_release/hvcs/bitbucket.py000066400000000000000000000236621475670435200257540ustar00rootroot00000000000000"""Helper code for interacting with a Bitbucket remote VCS""" # Note: Bitbucket doesn't support releases. But it allows users to use # `semantic-release version` without having to specify `--no-vcs-release`. 
from __future__ import annotations import logging import os from functools import lru_cache from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from urllib3.util.url import Url, parse_url from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable # Globals log = logging.getLogger(__name__) class Bitbucket(RemoteHvcsBase): """ Bitbucket HVCS interface for interacting with BitBucket repositories This class supports the following products: - BitBucket Cloud - BitBucket Data Center Server (on-premises installations) This interface does its best to detect which product is configured based on the provided domain. If it is the official `bitbucket.org`, the default domain, then it is considered as BitBucket Cloud which uses the subdomain `api.bitbucket.org/2.0` for api communication. If the provided domain is anything else, than it is assumed to be communicating with an on-premise or 3rd-party maintained BitBucket instance which matches with the BitBucket Data Center Server product. The on-prem server product uses a path prefix for handling api requests which is configured to be `server.domain/rest/api/1.0` based on the documentation in April 2024. """ OFFICIAL_NAME = "Bitbucket" DEFAULT_DOMAIN = "bitbucket.org" DEFAULT_API_SUBDOMAIN_PREFIX = "api" DEFAULT_API_PATH_CLOUD = "/2.0" DEFAULT_API_PATH_ONPREM = "/rest/api/1.0" DEFAULT_API_URL_CLOUD = f"https://{DEFAULT_API_SUBDOMAIN_PREFIX}.{DEFAULT_DOMAIN}{DEFAULT_API_PATH_CLOUD}" DEFAULT_ENV_TOKEN_NAME = "BITBUCKET_TOKEN" # noqa: S105 def __init__( self, remote_url: str, *, hvcs_domain: str | None = None, hvcs_api_domain: str | None = None, token: str | None = None, allow_insecure: bool = False, **kwargs: Any, # noqa: ARG002 ) -> None: super().__init__(remote_url) self.token = token # NOTE: Uncomment in the future when we actually have functionalty to # use the api, but currently there is none. 
# auth = None if not self.token else TokenAuth(self.token) # self.session = build_requests_session(auth=auth) domain_url = self._normalize_url( hvcs_domain or f"https://{self.DEFAULT_DOMAIN}", allow_insecure=allow_insecure, ) # Strip any auth, query or fragment from the domain self._hvcs_domain = parse_url( Url( scheme=domain_url.scheme, host=domain_url.host, port=domain_url.port, path=str(PurePosixPath(domain_url.path or "/")), ).url.rstrip("/") ) # Parse api domain if provided otherwise infer from domain api_domain_parts = self._normalize_url( hvcs_api_domain or self._derive_api_url_from_base_domain(), allow_insecure=allow_insecure, ) # As Bitbucket Cloud and Bitbucket Server (on-prem) have different api paths # lets check what we have been given and set the api url accordingly # ref: https://developer.atlassian.com/server/bitbucket/how-tos/command-line-rest/ # NOTE: BitBucket Server (on premise) uses a path prefix '/rest/api/1.0' for the api # while BitBucket Cloud uses a separate subdomain with '/2.0' path prefix is_bitbucket_cloud = bool( self.hvcs_domain.url == f"https://{self.DEFAULT_DOMAIN}" ) if ( is_bitbucket_cloud and hvcs_api_domain and api_domain_parts.url not in Bitbucket.DEFAULT_API_URL_CLOUD ): # Api was provied but is not a subset of the expected one, raise an error # we check for a subset because the user may not have provided the full api path # but the correct domain. If they didn't, then we are erroring out here. raise ValueError( f"Invalid api domain {api_domain_parts.url} for BitBucket Cloud. " f"Expected {Bitbucket.DEFAULT_API_URL_CLOUD}." 
) # Set the api url to the default cloud one if we are on cloud, otherwise # use the verified api domain for a on-prem server self._api_url = parse_url( Bitbucket.DEFAULT_API_URL_CLOUD if is_bitbucket_cloud else Url( # Strip any auth, query or fragment from the domain scheme=api_domain_parts.scheme, host=api_domain_parts.host, port=api_domain_parts.port, path=str( PurePosixPath( # pass any custom server prefix path but ensure we don't # double up the api path in the case the user provided it str.replace( api_domain_parts.path or "", self.DEFAULT_API_PATH_ONPREM, "", ).lstrip("/") or "/", # apply the on-prem api path self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), ).url.rstrip("/") ) def _derive_api_url_from_base_domain(self) -> Url: return parse_url( Url( # infer from Domain url and append the api path **{ **self.hvcs_domain._asdict(), "host": self.hvcs_domain.host, "path": str( PurePosixPath( str.lstrip(self.hvcs_domain.path or "", "/") or "/", self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), } ).url.rstrip("/") ) @lru_cache(maxsize=1) def _get_repository_owner_and_name(self) -> tuple[str, str]: # ref: https://support.atlassian.com/bitbucket-cloud/docs/variables-and-secrets/ if "BITBUCKET_REPO_FULL_NAME" in os.environ: log.info("Getting repository owner and name from environment variables.") owner, name = os.environ["BITBUCKET_REPO_FULL_NAME"].rsplit("/", 1) return owner, name return super()._get_repository_owner_and_name() def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not use_token: return self._remote_url if not self.token: raise ValueError("Requested to use token but no token set.") # If the user is set, assume the token is an user secret. This will work # on any repository the user has access to. 
# https://support.atlassian.com/bitbucket-cloud/docs/push-back-to-your-repository # If the user variable is not set, assume it is a repository token # which will only work on the repository it was created for. # https://support.atlassian.com/bitbucket-cloud/docs/using-access-tokens user = os.environ.get("BITBUCKET_USER", "x-token-auth") return self.create_server_url( auth=f"{user}:{self.token}" if user else self.token, path=f"/{self.owner}/{self.repo_name}.git", ) def compare_url(self, from_rev: str, to_rev: str) -> str: """ Get the Bitbucket comparison link between two version tags. :param from_rev: The older version to compare. :param to_rev: The newer version to compare. :return: Link to view a comparison between the two versions. """ return self.create_repo_url( repo_path=f"/branches/compare/{from_rev}%0D{to_rev}" ) def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/commits/{commit_hash}") def pull_request_url(self, pr_number: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(pr_number, str) and ( match := regexp(r"(\d+)$").search(pr_number) ): try: pr_number = int(match.group(1)) except ValueError: return "" if isinstance(pr_number, int): return self.create_repo_url(repo_path=f"/pull-requests/{pr_number}") return "" @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Bitbucket.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Bitbucket.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Bitbucket.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.compare_url, self.pull_request_url, self.format_w_official_vcs_name, ) def upload_dists(self, tag: str, dist_glob: str) -> int: return super().upload_dists(tag, dist_glob) def 
create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> int | str: return super().create_or_update_release(tag, release_notes, prerelease) def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int | str: return super().create_release(tag, release_notes, prerelease, assets, noop) RemoteHvcsBase.register(Bitbucket) python-semantic-release-9.21.0/src/semantic_release/hvcs/gitea.py000066400000000000000000000321571475670435200250700ustar00rootroot00000000000000"""Helper code for interacting with a Gitea remote VCS""" from __future__ import annotations import glob import logging import os from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from requests import HTTPError, JSONDecodeError from urllib3.util.url import Url, parse_url from semantic_release.cli.util import noop_report from semantic_release.errors import ( AssetUploadError, IncompleteReleaseError, UnexpectedResponse, ) from semantic_release.helpers import logged_function from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.token_auth import TokenAuth from semantic_release.hvcs.util import build_requests_session, suppress_not_found if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable # Globals log = logging.getLogger(__name__) class Gitea(RemoteHvcsBase): """Gitea helper class""" OFFICIAL_NAME = "Gitea" DEFAULT_DOMAIN = "gitea.com" DEFAULT_API_PATH = "/api/v1" DEFAULT_ENV_TOKEN_NAME = "GITEA_TOKEN" # noqa: S105 def __init__( self, remote_url: str, *, hvcs_domain: str | None = None, token: str | None = None, allow_insecure: bool = False, **_kwargs: Any, ) -> None: super().__init__(remote_url) self.token = token auth = None if not self.token else TokenAuth(self.token) self.session = build_requests_session(auth=auth) domain_url = self._normalize_url( hvcs_domain or 
os.getenv("GITEA_SERVER_URL", "") or f"https://{self.DEFAULT_DOMAIN}", allow_insecure=allow_insecure, ) # Strip any auth, query or fragment from the domain self._hvcs_domain = parse_url( Url( scheme=domain_url.scheme, host=domain_url.host, port=domain_url.port, path=str(PurePosixPath(domain_url.path or "/")), ).url.rstrip("/") ) self._api_url = self._normalize_url( os.getenv("GITEA_API_URL", "").rstrip("/") or Url( # infer from Domain url and append the default api path **{ **self.hvcs_domain._asdict(), "path": f"{self.hvcs_domain.path or ''}{self.DEFAULT_API_PATH}", } ).url, allow_insecure=allow_insecure, ) @logged_function(log) def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int: """ Create a new release Ref: https://gitea.com/api/swagger#/repository/repoCreateRelease :param tag: Tag to create release for :param release_notes: The release notes for this version :param prerelease: Whether or not this release should be specified as a prerelease :return: Whether the request succeeded """ if noop: noop_report( str.join( " ", [ f"would have created a release for tag {tag}", "with the following notes:\n", release_notes, ], ) ) if assets: noop_report( str.join( "\n", [ "would have uploaded the following assets to the release:", *assets, ], ) ) return -1 log.info("Creating release for tag %s", tag) releases_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases", ) response = self.session.post( releases_endpoint, json={ "tag_name": tag, "name": tag, "body": release_notes, "draft": False, "prerelease": prerelease, }, ) # Raise an error if the request was not successful response.raise_for_status() try: release_id: int = response.json()["id"] log.info("Successfully created release with ID: %s", release_id) except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise 
UnexpectedResponse("JSON response is missing an id") from err errors = [] for asset in assets or []: log.info("Uploading asset %s", asset) try: self.upload_release_asset(release_id, asset) except HTTPError as err: errors.append( AssetUploadError(f"Failed asset upload for {asset}").with_traceback( err.__traceback__ ) ) if len(errors) < 1: return release_id for error in errors: log.exception(error) raise IncompleteReleaseError( f"Failed to upload asset{'s' if len(errors) > 1 else ''} to release!" ) @logged_function(log) @suppress_not_found def get_release_id_by_tag(self, tag: str) -> int | None: """ Get a release by its tag name https://gitea.com/api/swagger#/repository/repoGetReleaseByTag :param tag: Tag to get release for :return: ID of found release """ tag_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/tags/{tag}", ) response = self.session.get(tag_endpoint) # Raise an error if the request was not successful response.raise_for_status() try: data = response.json() return data["id"] except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse("JSON response is missing an id") from err @logged_function(log) def edit_release_notes(self, release_id: int, release_notes: str) -> int: """ Edit a release with updated change notes https://gitea.com/api/swagger#/repository/repoEditRelease :param id: ID of release to update :param release_notes: The release notes for this version :return: The ID of the release that was edited """ log.info("Updating release %s", release_id) release_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}", ) response = self.session.patch( release_endpoint, json={"body": release_notes}, ) # Raise an error if the request was not successful response.raise_for_status() return release_id @logged_function(log) def create_or_update_release( self, tag: str, release_notes: str, 
prerelease: bool = False ) -> int: """ Post release changelog :param version: The version number :param changelog: The release notes for this version :return: The status of the request """ log.info("Creating release for %s", tag) try: return self.create_release(tag, release_notes, prerelease) except HTTPError as err: log.debug("error creating release: %s", err) log.debug("looking for an existing release to update") release_id = self.get_release_id_by_tag(tag) if release_id is None: raise ValueError( f"release id for tag {tag} not found, and could not be created" ) # If this errors we let it die log.debug("Found existing release %s, updating", release_id) return self.edit_release_notes(release_id, release_notes) @logged_function(log) def asset_upload_url(self, release_id: str) -> str: """ Get the correct upload url for a release https://gitea.com/api/swagger#/repository/repoCreateReleaseAttachment :param release_id: ID of the release to upload to """ return self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}/assets", ) @logged_function(log) def upload_release_asset( self, release_id: int, file: str, label: str | None = None, # noqa: ARG002 ) -> bool: """ Upload an asset to an existing release https://gitea.com/api/swagger#/repository/repoCreateReleaseAttachment :param release_id: ID of the release to upload to :param file: Path of the file to upload :param label: this parameter has no effect :return: The status of the request """ url = self.asset_upload_url(release_id) with open(file, "rb") as attachment: name = os.path.basename(file) content_type = "application/octet-stream" response = self.session.post( url, params={"name": name}, data={}, files={ "attachment": ( name, attachment, content_type, ), }, ) # Raise an error if the request was not successful response.raise_for_status() log.info( "Successfully uploaded %s to Gitea, url: %s, status code: %s", file, response.url, response.status_code, ) return True 
@logged_function(log) def upload_dists(self, tag: str, dist_glob: str) -> int: """ Upload distributions to a release :param tag: Tag to upload for :param path: Path to the dist directory :return: The number of distributions successfully uploaded """ # Find the release corresponding to this tag release_id = self.get_release_id_by_tag(tag=tag) if not release_id: log.warning("No release corresponds to tag %s, can't upload dists", tag) return 0 # Upload assets n_succeeded = 0 for file_path in ( f for f in glob.glob(dist_glob, recursive=True) if os.path.isfile(f) ): try: self.upload_release_asset(release_id, file_path) n_succeeded += 1 except HTTPError: # noqa: PERF203 log.exception("error uploading asset %s", file_path) return n_succeeded def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not (self.token and use_token): return self._remote_url return self.create_server_url( auth=self.token, path=f"{self.owner}/{self.repo_name}.git", ) def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/commit/{commit_hash}") def issue_url(self, issue_num: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(issue_num, str) and ( match := regexp(r"(\d+)$").search(issue_num) ): try: issue_num = int(match.group(1)) except ValueError: return "" if isinstance(issue_num, int): return self.create_repo_url(repo_path=f"/issues/{issue_num}") return "" def pull_request_url(self, pr_number: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(pr_number, str) and ( match := regexp(r"(\d+)$").search(pr_number) ): try: pr_number = int(match.group(1)) except ValueError: return "" if isinstance(pr_number, int): return self.create_repo_url(repo_path=f"/pulls/{pr_number}") return "" def create_release_url(self, tag: str = "") -> str: tag_str = tag.strip() tag_path = f"tag/{tag_str}" if tag_str else 
"" return self.create_repo_url(repo_path=f"releases/{tag_path}") @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Gitea.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Gitea.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Gitea.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.issue_url, self.pull_request_url, self.create_release_url, self.format_w_official_vcs_name, ) RemoteHvcsBase.register(Gitea) python-semantic-release-9.21.0/src/semantic_release/hvcs/github.py000066400000000000000000000502511475670435200252540ustar00rootroot00000000000000"""Helper code for interacting with a GitHub remote VCS""" from __future__ import annotations import glob import logging import mimetypes import os from functools import lru_cache from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from requests import HTTPError, JSONDecodeError from urllib3.util.url import Url, parse_url from semantic_release.cli.util import noop_report from semantic_release.errors import ( AssetUploadError, IncompleteReleaseError, UnexpectedResponse, ) from semantic_release.helpers import logged_function from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.token_auth import TokenAuth from semantic_release.hvcs.util import build_requests_session, suppress_not_found if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable # Globals log = logging.getLogger(__name__) # Add a mime type for wheels # Fix incorrect entries in the `mimetypes` registry. # On Windows, the Python standard library's `mimetypes` reads in # mappings from file extension to MIME type from the Windows # registry. 
Other applications can and do write incorrect values # to this registry, which causes `mimetypes.guess_type` to return # incorrect values, which causes TensorBoard to fail to render on # the frontend. # This method hard-codes the correct mappings for certain MIME # types that are known to be either used by python-semantic-release or # problematic in general. if mimetypes.guess_type("test.whl")[0] != "application/octet-stream": mimetypes.add_type("application/octet-stream", ".whl") if mimetypes.guess_type("test.md")[0] != "text/markdown": mimetypes.add_type("text/markdown", ".md") class Github(RemoteHvcsBase): """ GitHub HVCS interface for interacting with GitHub repositories This class supports the following products: - GitHub Free, Pro, & Team - GitHub Enterprise Cloud - GitHub Enterprise Server (on-premises installations) This interface does its best to detect which product is configured based on the provided domain. If it is the official `github.com`, the default domain, then it is considered as GitHub Enterprise Cloud which uses the subdomain `api.github.com` for api communication. If the provided domain is anything else, than it is assumed to be communicating with an on-premise or 3rd-party maintained GitHub instance which matches with the GitHub Enterprise Server product. The on-prem server product uses a path prefix for handling api requests which is configured to be `server.domain/api/v3` based on the documentation in April 2024. """ OFFICIAL_NAME = "GitHub" DEFAULT_DOMAIN = "github.com" DEFAULT_API_SUBDOMAIN_PREFIX = "api" DEFAULT_API_DOMAIN = f"{DEFAULT_API_SUBDOMAIN_PREFIX}.{DEFAULT_DOMAIN}" DEFAULT_API_PATH_CLOUD = "/" # no path prefix! 
DEFAULT_API_PATH_ONPREM = "/api/v3" DEFAULT_API_URL_CLOUD = f"https://{DEFAULT_API_SUBDOMAIN_PREFIX}.{DEFAULT_DOMAIN}{DEFAULT_API_PATH_CLOUD}".rstrip( "/" ) DEFAULT_ENV_TOKEN_NAME = "GH_TOKEN" # noqa: S105 def __init__( self, remote_url: str, *, hvcs_domain: str | None = None, hvcs_api_domain: str | None = None, token: str | None = None, allow_insecure: bool = False, **_kwargs: Any, ) -> None: super().__init__(remote_url) self.token = token auth = None if not self.token else TokenAuth(self.token) self.session = build_requests_session(auth=auth) # ref: https://docs.github.com/en/actions/reference/environment-variables#default-environment-variables domain_url_str = ( hvcs_domain or os.getenv("GITHUB_SERVER_URL", "") or f"https://{self.DEFAULT_DOMAIN}" ) domain_url = self._normalize_url( domain_url_str, allow_insecure=allow_insecure, ) # Strip any auth, query or fragment from the domain self._hvcs_domain = parse_url( Url( scheme=domain_url.scheme, host=domain_url.host, port=domain_url.port, path=str(PurePosixPath(domain_url.path or "/")), ).url.rstrip("/") ) # ref: https://docs.github.com/en/actions/reference/environment-variables#default-environment-variables api_url_str = ( hvcs_api_domain or os.getenv("GITHUB_API_URL", "") or self._derive_api_url_from_base_domain() ) api_domain_parts = self._normalize_url( api_url_str, allow_insecure=allow_insecure, ) # As GitHub Enterprise Cloud and GitHub Enterprise Server (on-prem) have different api locations # lets check what we have been given and set the api url accordingly # NOTE: Github Server (on premise) uses a path prefix '/api/v3' for the api # while GitHub Enterprise Cloud uses a separate subdomain as the base is_github_cloud = bool(self.hvcs_domain.url == f"https://{self.DEFAULT_DOMAIN}") if ( is_github_cloud and hvcs_api_domain and api_domain_parts.url not in Github.DEFAULT_API_URL_CLOUD ): # Api was provied but is not a subset of the expected one, raise an error # we check for a subset because the user may not have 
provided the full api path # but the correct domain. If they didn't, then we are erroring out here. raise ValueError( f"Invalid api domain {api_domain_parts.url} for GitHub Enterprise Cloud. " f"Expected {Github.DEFAULT_API_URL_CLOUD}." ) # Set the api url to the default cloud one if we are on cloud, otherwise # use the verified api domain for a on-prem server self._api_url = parse_url( Github.DEFAULT_API_URL_CLOUD if is_github_cloud else Url( # Strip any auth, query or fragment from the domain scheme=api_domain_parts.scheme, host=api_domain_parts.host, port=api_domain_parts.port, path=str( PurePosixPath( # pass any custom server prefix path but ensure we don't # double up the api path in the case the user provided it str.replace( api_domain_parts.path or "", self.DEFAULT_API_PATH_ONPREM, "", ).lstrip("/") or "/", # apply the on-prem api path self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), ).url.rstrip("/") ) def _derive_api_url_from_base_domain(self) -> Url: return parse_url( Url( # infer from Domain url and prepend the default api subdomain **{ **self.hvcs_domain._asdict(), "host": self.hvcs_domain.host, "path": str( PurePosixPath( str.lstrip(self.hvcs_domain.path or "", "/") or "/", self.DEFAULT_API_PATH_ONPREM.lstrip("/"), ) ), } ).url.rstrip("/") ) @lru_cache(maxsize=1) def _get_repository_owner_and_name(self) -> tuple[str, str]: # Github actions context if "GITHUB_REPOSITORY" in os.environ: log.debug("getting repository owner and name from environment variables") owner, name = os.environ["GITHUB_REPOSITORY"].rsplit("/", 1) return owner, name return super()._get_repository_owner_and_name() @logged_function(log) def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int: """ Create a new release REF: https://docs.github.com/rest/reference/repos#create-a-release :param tag: Tag to create release for :param release_notes: The release notes for this version :param prerelease: 
Whether or not this release should be created as a prerelease :param assets: a list of artifacts to upload to the release :return: the ID of the release """ if noop: noop_report( str.join( " ", [ f"would have created a release for tag {tag}", "with the following notes:\n", release_notes, ], ) ) if assets: noop_report( str.join( "\n", [ "would have uploaded the following assets to the release:", *assets, ], ) ) return -1 log.info("Creating release for tag %s", tag) releases_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases", ) response = self.session.post( releases_endpoint, json={ "tag_name": tag, "name": tag, "body": release_notes, "draft": False, "prerelease": prerelease, }, ) # Raise an error if the request was not successful response.raise_for_status() try: release_id: int = response.json()["id"] log.info("Successfully created release with ID: %s", release_id) except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse("JSON response is missing an id") from err errors = [] for asset in assets or []: log.info("Uploading asset %s", asset) try: self.upload_release_asset(release_id, asset) except HTTPError as err: errors.append( AssetUploadError(f"Failed asset upload for {asset}").with_traceback( err.__traceback__ ) ) if len(errors) < 1: return release_id for error in errors: log.exception(error) raise IncompleteReleaseError( f"Failed to upload asset{'s' if len(errors) > 1 else ''} to release!" 
) @logged_function(log) @suppress_not_found def get_release_id_by_tag(self, tag: str) -> int | None: """ Get a release by its tag name https://docs.github.com/rest/reference/repos#get-a-release-by-tag-name :param tag: Tag to get release for :return: ID of release, if found, else None """ tag_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/tags/{tag}", ) response = self.session.get(tag_endpoint) # Raise an error if the request was not successful response.raise_for_status() try: data = response.json() return data["id"] except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse("JSON response is missing an id") from err @logged_function(log) def edit_release_notes(self, release_id: int, release_notes: str) -> int: """ Edit a release with updated change notes https://docs.github.com/rest/reference/repos#update-a-release :param release_id: ID of release to update :param release_notes: The release notes for this version :return: The ID of the release that was edited """ log.info("Updating release %s", release_id) release_endpoint = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}", ) response = self.session.post( release_endpoint, json={"body": release_notes}, ) # Raise an error if the update was unsuccessful response.raise_for_status() return release_id @logged_function(log) def create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> int: """ Post release changelog :param tag: The version number :param release_notes: The release notes for this version :param prerelease: Whether or not this release should be created as a prerelease :return: The status of the request """ log.info("Creating release for %s", tag) try: return self.create_release(tag, release_notes, prerelease) except HTTPError as err: log.debug("error creating release: %s", err) log.debug("looking for an 
existing release to update") release_id = self.get_release_id_by_tag(tag) if release_id is None: raise ValueError( f"release id for tag {tag} not found, and could not be created" ) log.debug("Found existing release %s, updating", release_id) # If this errors we let it die return self.edit_release_notes(release_id, release_notes) @logged_function(log) @suppress_not_found def asset_upload_url(self, release_id: str) -> str | None: """ Get the correct upload url for a release https://docs.github.com/en/enterprise-server@3.5/rest/releases/releases#get-a-release :param release_id: ID of the release to upload to :return: URL to upload for a release if found, else None """ # https://docs.github.com/en/enterprise-server@3.5/rest/releases/assets#upload-a-release-asset release_url = self.create_api_url( endpoint=f"/repos/{self.owner}/{self.repo_name}/releases/{release_id}" ) response = self.session.get(release_url) response.raise_for_status() try: upload_url: str = response.json()["upload_url"] return upload_url.replace("{?name,label}", "") except JSONDecodeError as err: raise UnexpectedResponse("Unreadable json response") from err except KeyError as err: raise UnexpectedResponse( "JSON response is missing a key 'upload_url'" ) from err @logged_function(log) def upload_release_asset( self, release_id: int, file: str, label: str | None = None ) -> bool: """ Upload an asset to an existing release https://docs.github.com/rest/reference/repos#upload-a-release-asset :param release_id: ID of the release to upload to :param file: Path of the file to upload :param label: Optional custom label for this file :return: The status of the request """ url = self.asset_upload_url(release_id) if url is None: raise ValueError( "There is no associated url for uploading asset for release " f"{release_id}. 
Release url: " f"{self.api_url}/repos/{self.owner}/{self.repo_name}/releases/{release_id}" ) content_type = ( mimetypes.guess_type(file, strict=False)[0] or "application/octet-stream" ) with open(file, "rb") as data: response = self.session.post( url, params={"name": os.path.basename(file), "label": label}, headers={ "Content-Type": content_type, }, data=data.read(), ) # Raise an error if the upload was unsuccessful response.raise_for_status() log.debug( "Successfully uploaded %s to Github, url: %s, status code: %s", file, response.url, response.status_code, ) return True @logged_function(log) def upload_dists(self, tag: str, dist_glob: str) -> int: """ Upload distributions to a release :param tag: Version to upload for :param dist_glob: Path to the dist directory :return: The number of distributions successfully uploaded """ # Find the release corresponding to this version release_id = self.get_release_id_by_tag(tag=tag) if not release_id: log.warning("No release corresponds to tag %s, can't upload dists", tag) return 0 # Upload assets n_succeeded = 0 for file_path in ( f for f in glob.glob(dist_glob, recursive=True) if os.path.isfile(f) ): try: self.upload_release_asset(release_id, file_path) n_succeeded += 1 except HTTPError: # noqa: PERF203 log.exception("error uploading asset %s", file_path) return n_succeeded def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not (self.token and use_token): log.info("requested to use token for push but no token set, ignoring...") return self._remote_url actor = os.getenv("GITHUB_ACTOR", None) return self.create_server_url( auth=f"{actor}:{self.token}" if actor else self.token, path=f"/{self.owner}/{self.repo_name}.git", ) def compare_url(self, from_rev: str, to_rev: str) -> str: """ Get the GitHub comparison link between two version tags. :param from_rev: The older version to compare. :param to_rev: The newer version to compare. 
:return: Link to view a comparison between the two versions. """ return self.create_repo_url(repo_path=f"/compare/{from_rev}...{to_rev}") def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/commit/{commit_hash}") def issue_url(self, issue_num: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(issue_num, str) and ( match := regexp(r"(\d+)$").search(issue_num) ): try: issue_num = int(match.group(1)) except ValueError: return "" if isinstance(issue_num, int): return self.create_repo_url(repo_path=f"/issues/{issue_num}") return "" def pull_request_url(self, pr_number: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(pr_number, str) and ( match := regexp(r"(\d+)$").search(pr_number) ): try: pr_number = int(match.group(1)) except ValueError: return "" if isinstance(pr_number, int): return self.create_repo_url(repo_path=f"/pull/{pr_number}") return "" def create_release_url(self, tag: str = "") -> str: tag_str = tag.strip() tag_path = f"tag/{tag_str}" if tag_str else "" return self.create_repo_url(repo_path=f"releases/{tag_path}") @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Github.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Github.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Github.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.compare_url, self.issue_url, self.pull_request_url, self.create_release_url, self.format_w_official_vcs_name, ) RemoteHvcsBase.register(Github) python-semantic-release-9.21.0/src/semantic_release/hvcs/gitlab.py000066400000000000000000000245771475670435200252500ustar00rootroot00000000000000"""Helper code for interacting with a Gitlab remote VCS""" from 
__future__ import annotations import logging import os from functools import lru_cache from pathlib import PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING import gitlab import gitlab.exceptions import gitlab.v4 import gitlab.v4.objects from urllib3.util.url import Url, parse_url from semantic_release.cli.util import noop_report from semantic_release.errors import UnexpectedResponse from semantic_release.helpers import logged_function from semantic_release.hvcs.remote_hvcs_base import RemoteHvcsBase from semantic_release.hvcs.util import suppress_not_found if TYPE_CHECKING: # pragma: no cover from typing import Any, Callable from gitlab.v4.objects import Project as GitLabProject log = logging.getLogger(__name__) # Globals log = logging.getLogger(__name__) class Gitlab(RemoteHvcsBase): """Gitlab HVCS interface for interacting with Gitlab repositories""" DEFAULT_ENV_TOKEN_NAME = "GITLAB_TOKEN" # noqa: S105 # purposefully not CI_JOB_TOKEN as it is not a personal access token, # It is missing the permission to push to the repository, but has all others (releases, packages, etc.) 
OFFICIAL_NAME = "GitLab" DEFAULT_DOMAIN = "gitlab.com" def __init__( self, remote_url: str, *, hvcs_domain: str | None = None, token: str | None = None, allow_insecure: bool = False, **_kwargs: Any, ) -> None: super().__init__(remote_url) self.token = token self.project_namespace = f"{self.owner}/{self.repo_name}" self._project: GitLabProject | None = None domain_url = self._normalize_url( hvcs_domain or os.getenv("CI_SERVER_URL", "") or f"https://{self.DEFAULT_DOMAIN}", allow_insecure=allow_insecure, ) # Strip any auth, query or fragment from the domain self._hvcs_domain = parse_url( Url( scheme=domain_url.scheme, host=domain_url.host, port=domain_url.port, path=str(PurePosixPath(domain_url.path or "/")), ).url.rstrip("/") ) self._client = gitlab.Gitlab(self.hvcs_domain.url, private_token=self.token) self._api_url = parse_url(self._client.api_url) @property def project(self) -> GitLabProject: if self._project is None: self._project = self._client.projects.get(self.project_namespace) return self._project @lru_cache(maxsize=1) def _get_repository_owner_and_name(self) -> tuple[str, str]: """ Get the repository owner and name from GitLab CI environment variables, if available, otherwise from parsing the remote url """ if "CI_PROJECT_NAMESPACE" in os.environ and "CI_PROJECT_NAME" in os.environ: log.debug("getting repository owner and name from environment variables") return os.environ["CI_PROJECT_NAMESPACE"], os.environ["CI_PROJECT_NAME"] return super()._get_repository_owner_and_name() @logged_function(log) def create_release( self, tag: str, release_notes: str, prerelease: bool = False, # noqa: ARG002 assets: list[str] | None = None, # noqa: ARG002 noop: bool = False, ) -> str: """ Create a release in a remote VCS, adding any release notes and assets to it :param tag: The tag to create the release for :param release_notes: The changelog description for this version only :param prerelease: This parameter has no effect in GitLab :param assets: A list of paths to files 
to upload as assets (TODO: not implemented) :param noop: If True, do not perform any actions, only log intents :return: The tag of the release :raises: GitlabAuthenticationError: If authentication is not correct :raises: GitlabCreateError: If the server cannot perform the request """ if noop: noop_report(f"would have created a release for tag {tag}") return tag log.info("Creating release for %s", tag) # ref: https://docs.gitlab.com/ee/api/releases/index.html#create-a-release self.project.releases.create( { "name": tag, "tag_name": tag, "tag_message": tag, "description": release_notes, } ) log.info("Successfully created release for %s", tag) return tag @logged_function(log) @suppress_not_found def get_release_by_tag(self, tag: str) -> gitlab.v4.objects.ProjectRelease | None: """ Get a release by its tag name. :param tag: The tag name to get the release for :return: gitlab.v4.objects.ProjectRelease or None if not found :raises: gitlab.exceptions.GitlabAuthenticationError: If the user is not authenticated """ try: return self.project.releases.get(tag) except gitlab.exceptions.GitlabGetError: log.debug("Release %s not found", tag) return None except KeyError as err: raise UnexpectedResponse("JSON response is missing commit.id") from err @logged_function(log) def edit_release_notes( # type: ignore[override] self, release: gitlab.v4.objects.ProjectRelease, release_notes: str, ) -> str: """ Update the release notes for a given release. 
:param release: The release object to update :param release_notes: The new release notes :return: The release id :raises: GitlabAuthenticationError: If authentication is not correct :raises: GitlabUpdateError: If the server cannot perform the request """ log.info( "Updating release %s [%s]", release.name, release.attributes.get("commit", {}).get("id"), ) release.description = release_notes release.save() return str(release.get_id()) @logged_function(log) def create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> str: """ Create or update a release for the given tag in a remote VCS. :param tag: The tag to create or update the release for :param release_notes: The changelog description for this version only :param prerelease: This parameter has no effect in GitLab :return: The release id :raises ValueError: If the release could not be created or updated :raises gitlab.exceptions.GitlabAuthenticationError: If the user is not authenticated :raises GitlabUpdateError: If the server cannot perform the request """ try: return self.create_release( tag=tag, release_notes=release_notes, prerelease=prerelease ) except gitlab.GitlabCreateError: log.info( "New release %s could not be created for project %s", tag, self.project_namespace, ) if (release_obj := self.get_release_by_tag(tag)) is None: raise ValueError( f"release for tag {tag} could not be found, and could not be created" ) log.debug( "Found existing release commit %s, updating", release_obj.commit.get("id") ) # If this errors we let it die return self.edit_release_notes( release=release_obj, release_notes=release_notes, ) def remote_url(self, use_token: bool = True) -> str: """Get the remote url including the token for authentication if requested""" if not (self.token and use_token): return self._remote_url return self.create_server_url( auth=f"gitlab-ci-token:{self.token}", path=f"{self.project_namespace}.git", ) def compare_url(self, from_rev: str, to_rev: str) -> str: return 
self.create_repo_url(repo_path=f"/-/compare/{from_rev}...{to_rev}") def commit_hash_url(self, commit_hash: str) -> str: return self.create_repo_url(repo_path=f"/-/commit/{commit_hash}") def issue_url(self, issue_num: str | int) -> str: # Strips off any character prefix like '#' that usually exists if isinstance(issue_num, str) and ( match := regexp(r"(\d+)$").search(issue_num) ): try: issue_num = int(match.group(1)) except ValueError: return "" if isinstance(issue_num, int): return self.create_repo_url(repo_path=f"/-/issues/{issue_num}") return "" def merge_request_url(self, mr_number: str | int) -> str: # Strips off any character prefix like '!' that usually exists if isinstance(mr_number, str) and ( match := regexp(r"(\d+)$").search(mr_number) ): try: mr_number = int(match.group(1)) except ValueError: return "" if isinstance(mr_number, int): return self.create_repo_url(repo_path=f"/-/merge_requests/{mr_number}") return "" def pull_request_url(self, pr_number: str | int) -> str: return self.merge_request_url(mr_number=pr_number) def upload_dists(self, tag: str, dist_glob: str) -> int: return super().upload_dists(tag, dist_glob) def create_release_url(self, tag: str = "") -> str: tag_str = tag.strip() return self.create_repo_url(repo_path=f"/-/releases/{tag_str}") @staticmethod def format_w_official_vcs_name(format_str: str) -> str: if "%s" in format_str: return format_str % Gitlab.OFFICIAL_NAME if "{}" in format_str: return format_str.format(Gitlab.OFFICIAL_NAME) if "{vcs_name}" in format_str: return format_str.format(vcs_name=Gitlab.OFFICIAL_NAME) return format_str def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return ( self.create_server_url, self.create_repo_url, self.commit_hash_url, self.compare_url, self.issue_url, self.merge_request_url, self.pull_request_url, self.create_release_url, self.format_w_official_vcs_name, ) RemoteHvcsBase.register(Gitlab) 
python-semantic-release-9.21.0/src/semantic_release/hvcs/remote_hvcs_base.py000066400000000000000000000137311475670435200273040ustar00rootroot00000000000000"""Common functionality and interface for interacting with Git remote VCS""" from __future__ import annotations import logging from abc import ABCMeta, abstractmethod from pathlib import PurePosixPath from typing import TYPE_CHECKING from urllib3.util.url import Url, parse_url from semantic_release.hvcs import HvcsBase if TYPE_CHECKING: # pragma: no cover from typing import Any # Globals logger = logging.getLogger(__name__) class RemoteHvcsBase(HvcsBase, metaclass=ABCMeta): """ Interface for subclasses interacting with a remote VCS This abstract class is defined to provide common helper functions and a set of basic methods that all remote VCS environments usually support. If the remote vcs implementation (via subclass) does not support a functionality then it can just call super()'s method which defaults as a non-supported log message and empty results. This is more straightforward than checking for NotImplemented around every function call in the core library code. 
""" DEFAULT_ENV_TOKEN_NAME = "HVCS_TOKEN" # noqa: S105 def __init__(self, remote_url: str, *_args: Any, **_kwargs: Any) -> None: super().__init__(remote_url) self._hvcs_domain: Url | None = None self._api_url: Url | None = None @property def hvcs_domain(self) -> Url: if self._hvcs_domain is None: raise RuntimeError("Property 'hvcs_domain' was used before it was set!") return self._hvcs_domain @property def api_url(self) -> Url: if self._api_url is None: raise RuntimeError("Property 'api_url' was used before it was set!") return self._api_url @abstractmethod def upload_dists(self, tag: str, dist_glob: str) -> int: """ Upload built distributions to a release on a remote VCS that supports such uploads """ self._not_supported(self.upload_dists.__name__) return 0 @abstractmethod def create_release( self, tag: str, release_notes: str, prerelease: bool = False, assets: list[str] | None = None, noop: bool = False, ) -> int | str: """ Create a release in a remote VCS, if supported Which includes uploading any assets as part of the release """ self._not_supported(self.create_release.__name__) return -1 @abstractmethod def create_or_update_release( self, tag: str, release_notes: str, prerelease: bool = False ) -> int | str: """ Create or update a release for the given tag in a remote VCS, attaching the given changelog, if supported """ self._not_supported(self.create_or_update_release.__name__) return -1 def create_server_url( self, path: str, auth: str | None = None, query: str | None = None, fragment: str | None = None, ) -> str: # Ensure any path prefix is transfered but not doubled up on the derived url return self._derive_url( self.hvcs_domain, path=f"{self.hvcs_domain.path or ''}/{path.lstrip(self.hvcs_domain.path)}", auth=auth, query=query, fragment=fragment, ) def create_repo_url( self, repo_path: str, query: str | None = None, fragment: str | None = None, ) -> str: return self.create_server_url( path=f"/{self.owner}/{self.repo_name}/{repo_path}", query=query, 
fragment=fragment, ) def create_api_url( self, endpoint: str, auth: str | None = None, query: str | None = None, fragment: str | None = None, ) -> str: # Ensure any api path prefix is transfered but not doubled up on the derived api url return self._derive_url( self.api_url, path=f"{self.api_url.path or ''}/{endpoint.lstrip(self.api_url.path)}", auth=auth, query=query, fragment=fragment, ) @staticmethod def _derive_url( base_url: Url, path: str, auth: str | None = None, query: str | None = None, fragment: str | None = None, ) -> str: overrides = dict( filter( lambda x: x[1] is not None, { "auth": auth, "path": str(PurePosixPath("/", path.lstrip("/"))), "query": query, "fragment": fragment, }.items(), ) ) return Url( **{ **base_url._asdict(), **overrides, } ).url.rstrip("/") @staticmethod def _validate_url_scheme(url: Url, allow_insecure: bool = False) -> None: if url.scheme == "http" and not allow_insecure: raise ValueError("Insecure connections are currently disabled.") if url.scheme not in ["http", "https"]: raise ValueError( f"Invalid scheme {url.scheme} for {url.host}. " "Only http and https are supported." 
) @staticmethod def _normalize_url(url: Url | str, allow_insecure: bool = False) -> Url: """ Function to ensure url scheme is populated & allowed Raises ------ TypeError: when url parameter is not a string or parsable url ValueError: when the url scheme is not http or https """ tgt_url = parse_url(url) if isinstance(url, str) else url if not isinstance(tgt_url, Url): raise TypeError( f"Invalid url type ({type(tgt_url)}) received, expected Url or string" ) if not tgt_url.scheme: new_scheme = "http" if allow_insecure else "https" tgt_url = Url(**{**tgt_url._asdict(), "scheme": new_scheme}) RemoteHvcsBase._validate_url_scheme(tgt_url, allow_insecure=allow_insecure) return tgt_url python-semantic-release-9.21.0/src/semantic_release/hvcs/token_auth.py000066400000000000000000000012271475670435200261320ustar00rootroot00000000000000from requests import PreparedRequest from requests.auth import AuthBase class TokenAuth(AuthBase): """ requests Authentication for token based authorization. This allows us to attach the Authorization header with a token to a session. 
""" def __init__(self, token: str) -> None: self.token = token def __eq__(self, other: object) -> bool: return self.token == getattr(other, "token", None) def __ne__(self, other: object) -> bool: return not self == other def __call__(self, req: PreparedRequest) -> PreparedRequest: req.headers["Authorization"] = f"token {self.token}" return req python-semantic-release-9.21.0/src/semantic_release/hvcs/util.py000066400000000000000000000054771475670435200247610ustar00rootroot00000000000000from __future__ import annotations import logging from functools import wraps from typing import TYPE_CHECKING, Any, Callable, TypeVar from requests import HTTPError, Session from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry # type: ignore[import] if TYPE_CHECKING: # pragma: no cover from semantic_release.hvcs.token_auth import TokenAuth logger = logging.getLogger(__name__) def build_requests_session( raise_for_status: bool = True, retry: bool | int | Retry = True, auth: TokenAuth | None = None, ) -> Session: """ Create a requests session. :param raise_for_status: If True, a hook to invoke raise_for_status be installed :param retry: If true, it will use default Retry configuration. if an integer, it will use default Retry configuration with given integer as total retry count. if Retry instance, it will use this instance. 
:param auth: Optional TokenAuth instance to be used to provide the Authorization header to the session :return: configured requests Session """ session = Session() if raise_for_status: session.hooks = {"response": [lambda r, *_, **__: r.raise_for_status()]} if retry: if isinstance(retry, bool): retry = Retry() elif isinstance(retry, int): retry = Retry(retry) elif not isinstance(retry, Retry): raise ValueError("retry should be a bool, int or Retry instance.") adapter = HTTPAdapter(max_retries=retry) session.mount("http://", adapter) session.mount("https://", adapter) if auth: logger.debug("setting up default session authentication") session.auth = auth return session _R = TypeVar("_R") def suppress_http_error_for_codes( *codes: int, ) -> Callable[[Callable[..., _R]], Callable[..., _R | None]]: """ For the codes given, return a decorator that will suppress HTTPErrors that are raised from responses that came with one of those status codes. The function will return False instead of raising the HTTPError """ def _suppress_http_error_for_codes( func: Callable[..., _R], ) -> Callable[..., _R | None]: @wraps(func) def _wrapper(*a: Any, **kw: Any) -> _R | None: try: return func(*a, **kw) except HTTPError as err: if err.response and err.response.status_code in codes: logger.warning( "%s received response %s: %s", func.__qualname__, err.response.status_code, str(err), ) return None return _wrapper return _suppress_http_error_for_codes suppress_not_found = suppress_http_error_for_codes(404) python-semantic-release-9.21.0/src/semantic_release/py.typed000066400000000000000000000000001475670435200241370ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/version/000077500000000000000000000000001475670435200241375ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/version/__init__.py000066400000000000000000000004161475670435200262510ustar00rootroot00000000000000import semantic_release.version.declaration as declaration from 
semantic_release.version.algorithm import ( next_version, tags_and_versions, ) from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version python-semantic-release-9.21.0/src/semantic_release/version/algorithm.py000066400000000000000000000404701475670435200265040ustar00rootroot00000000000000from __future__ import annotations import logging from contextlib import suppress from functools import reduce from queue import LifoQueue from typing import TYPE_CHECKING, Iterable from semantic_release.commit_parser import ParsedCommit from semantic_release.commit_parser.token import ParseError from semantic_release.const import DEFAULT_VERSION from semantic_release.enums import LevelBump, SemanticReleaseLogLevels from semantic_release.errors import InternalError, InvalidVersion from semantic_release.helpers import validate_types_in_sequence if TYPE_CHECKING: # pragma: no cover from typing import Sequence from git.objects.commit import Commit from git.refs.tag import Tag from git.repo.base import Repo from semantic_release.commit_parser import ( CommitParser, ParseResult, ParserOptions, ) from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version logger = logging.getLogger(__name__) def tags_and_versions( tags: Iterable[Tag], translator: VersionTranslator ) -> list[tuple[Tag, Version]]: """ Return a list of 2-tuples, where each element is a tuple (tag, version) from the tags in the Git repo and their corresponding `Version` according to `Version.from_tag`. The returned list is sorted according to semver ordering rules. Tags which are not matched by `translator` are ignored. 
""" ts_and_vs: list[tuple[Tag, Version]] = [] for tag in tags: try: version = translator.from_tag(tag.name) except (NotImplementedError, InvalidVersion) as e: logger.warning( "Couldn't parse tag %s as as Version: %s", tag.name, str(e), exc_info=logger.isEnabledFor(logging.DEBUG), ) continue if version: ts_and_vs.append((tag, version)) logger.info("found %s previous tags", len(ts_and_vs)) return sorted(ts_and_vs, reverse=True, key=lambda v: v[1]) def _traverse_graph_for_commits( head_commit: Commit, latest_release_tag_str: str = "", ) -> Sequence[Commit]: # Depth-first search def dfs(start_commit: Commit, stop_nodes: set[Commit]) -> Sequence[Commit]: # Create a stack for DFS stack: LifoQueue[Commit] = LifoQueue() # Create a set to store visited graph nodes (commit objects in this case) visited: set[Commit] = set() # Initialize the result commits: list[Commit] = [] # Add the source node in the queue to start the search stack.put(start_commit) # Traverse the git history capturing each commit found before it reaches a stop node while not stack.empty(): if (node := stack.get()) in visited or node in stop_nodes: continue visited.add(node) commits.append(node) # Add all parent commits to the stack from left to right so that the rightmost is popped first # as the left side is generally the merged into branch for parent in node.parents: stack.put(parent) return commits # Run a Depth First Search to find all the commits since the last release return dfs( start_commit=head_commit, stop_nodes=set( head_commit.repo.iter_commits(latest_release_tag_str) if latest_release_tag_str else [] ), ) def _increment_version( latest_version: Version, latest_full_version: Version, level_bump: LevelBump, prerelease: bool, prerelease_token: str, major_on_zero: bool, allow_zero_version: bool, ) -> Version: """ Using the given versions, along with a given `level_bump`, increment to the next version according to whether or not this is a prerelease. 
`latest_version` is the most recent version released from this branch's history. `latest_full_version`, the most recent full release (i.e. not a prerelease) in this branch's history. `latest_version` and `latest_full_version` can be the same, but aren't necessarily. """ local_vars = list(locals().items()) logger.log( SemanticReleaseLogLevels.SILLY, "_increment_version: %s", str.join(", ", [f"{k} = {v}" for k, v in local_vars]), ) # Handle variations where the latest version is 0.x.x if latest_version.major == 0: if not allow_zero_version: # Set up default version to be 1.0.0 if currently 0.x.x which means a commented # breaking change is not required to bump to 1.0.0 logger.debug( "Bumping major version as 0.x.x versions are disabled because of allow_zero_version=False" ) level_bump = LevelBump.MAJOR elif not major_on_zero: # if we are a 0.x.y release and have set `major_on_zero`, # breaking changes should increment the minor digit # Correspondingly, we reduce the level that we increment the # version by. logger.debug( "reducing version increment due to 0. 
version and major_on_zero=False" ) level_bump = min(level_bump, LevelBump.MINOR) logger.debug( "prerelease=%s and the latest version %s %s prerelease", prerelease, latest_version, "is a" if latest_version.is_prerelease else "is not a", ) if level_bump == LevelBump.NO_RELEASE: raise ValueError("level_bump must be at least PRERELEASE_REVISION") if level_bump == LevelBump.PRERELEASE_REVISION and not latest_version.is_prerelease: raise ValueError( "Cannot increment a non-prerelease version with a prerelease level bump" ) # assume we always want to increment the version that is the latest in the branch's history base_version = latest_version # if the current version is a prerelease & we want a new prerelease, then # figure out if we need to bump the prerelease revision or start a new prerelease if latest_version.is_prerelease: # find the change since the last full release because if the current version is a prerelease # then we need to predict properly the next full version diff_with_last_released_version = latest_version - latest_full_version logger.debug( "the diff b/w the latest version '%s' and the latest full release version '%s' is: %s", latest_version, latest_full_version, diff_with_last_released_version, ) # Since the difference is less than or equal to the level bump and we want a new prerelease, # we can abort early and just increment the revision if level_bump <= diff_with_last_released_version: # 6a ii) if level_bump <= the level bump introduced by the previous tag (latest_version) if prerelease: logger.debug( "there has already been at least a %s release since the last full release %s", level_bump, latest_full_version, ) logger.debug("Incrementing the prerelease revision...") new_revision = base_version.to_prerelease( token=prerelease_token, revision=( 1 if latest_version.prerelease_token != prerelease_token else (latest_version.prerelease_revision or 0) + 1 ), ) logger.debug("Incremented %s to %s", base_version, new_revision) return new_revision # When we 
don't want a prerelease, but the previous version is a prerelease that # had a greater bump than we currently are applying, choose the larger bump instead # as it consumes this bump logger.debug("Finalizing the prerelease version...") return base_version.finalize_version() # Fallthrough to handle all larger level bumps logger.debug( "this release has a greater bump than any change since the last full release, %s", latest_full_version, ) # Fallthrough, if we don't want a prerelease, or if we do but the level bump is greater # # because the current version is a prerelease, we must start from the last full version # Case 1: we identified that the level bump is greater than the change since # the last full release, this will also reset the prerelease revision # Case 2: we don't want a prerelease, so consider only the last full version in history base_version = latest_full_version # From the base version, we can now increment the version according to the level bump # regardless of the prerelease status as bump() handles the reset and pass through logger.debug("Bumping %s with a %s bump", base_version, level_bump) target_next_version = base_version.bump(level_bump) # Converting to/from a prerelease if necessary target_next_version = ( target_next_version.to_prerelease(token=prerelease_token) if prerelease else target_next_version.finalize_version() ) logger.debug("Incremented %s to %s", base_version, target_next_version) return target_next_version def next_version( repo: Repo, translator: VersionTranslator, commit_parser: CommitParser[ParseResult, ParserOptions], prerelease: bool = False, major_on_zero: bool = True, allow_zero_version: bool = True, ) -> Version: """ Evaluate the history within `repo`, and based on the tags and commits in the repo history, identify the next semantic version that should be applied to a release """ # Default initial version # Since the translator is configured by the user, we can't guarantee that it will # be able to parse the default 
version. So we first cast it to a tag using the default # value and the users configured tag format, then parse it back to a version object default_initial_version = translator.from_tag( translator.str_to_tag(DEFAULT_VERSION) ) if default_initial_version is None: # This should never happen, but if it does, it's a bug raise InternalError( "Translator was unable to parse the embedded default version" ) # Step 1. All tags, sorted descending by semver ordering rules all_git_tags_as_versions = tags_and_versions(repo.tags, translator) # Retrieve all commit hashes (regardless of merges) in the current branch's history from repo origin commit_hash_set = { commit.hexsha for commit in _traverse_graph_for_commits(head_commit=repo.active_branch.commit) } # Filter all releases that are not found in the current branch's history historic_versions: list[Version] = [] for tag, version in all_git_tags_as_versions: # TODO: move this to tags_and_versions() function? # Ignore the error that is raised when tag points to a Blob or Tree object rather # than a commit object (tags that point to tags that then point to commits are resolved automatically) with suppress(ValueError): if tag.commit.hexsha in commit_hash_set: historic_versions.append(version) # Step 2. Get the latest final release version in the history of the current branch # or fallback to the default 0.0.0 starting version value if none are found latest_full_release_version = next( filter( lambda version: not version.is_prerelease, historic_versions, ), default_initial_version, ) logger.info( f"The last full version in this branch's history was {latest_full_release_version}" if latest_full_release_version != default_initial_version else "No full releases found in this branch's history" ) # Step 3. 
Determine the latest release version in the history of the current branch # If we the desired result is a prerelease, we must determine if there was any previous # prerelease in the history of the current branch beyond the latest_full_release_version. # Important to note that, we only consider prereleases that are of the same prerelease token # as the basis of incrementing the prerelease revision. # If we are not looking for a prerelease, this is the same as the last full release. latest_version = ( latest_full_release_version if not prerelease else next( filter( lambda version: all( [ version.is_prerelease, version.prerelease_token == translator.prerelease_token, version >= latest_full_release_version, ] ), historic_versions, ), latest_full_release_version, # default ) ) logger.info("The latest release in this branch's history was %s", latest_version) # Step 4. Walk the git tree to find all commits that have been made since the last release commits_since_last_release = _traverse_graph_for_commits( head_commit=repo.active_branch.commit, latest_release_tag_str=( # NOTE: the default_initial_version should not actually exist on the repository (ie v0.0.0) # so we provide an empty tag string when there are no tags on the repository yet latest_version.as_tag() if latest_version != default_initial_version else "" ), ) logger.info( f"Found {len(commits_since_last_release)} commits since the last release!" if len(commits_since_last_release) > 0 else "No commits found since the last release!" ) # Step 5. apply the parser to each commit in the history (could return multiple results per commit) parsed_results = list(map(commit_parser.parse, commits_since_last_release)) # Step 5A. 
Validation type check for the parser results (important because of possible custom parsers) for parsed_result in parsed_results: if not any( ( isinstance(parsed_result, (ParseError, ParsedCommit)), type(parsed_result) == list and validate_types_in_sequence( parsed_result, (ParseError, ParsedCommit) ), type(parsed_result) == tuple and validate_types_in_sequence( parsed_result, (ParseError, ParsedCommit) ), ) ): raise TypeError("Unexpected type returned from commit_parser.parse") # Step 5B. Accumulate all parsed results into a single list accounting for possible multiple results per commit consolidated_results: list[ParseResult] = reduce( lambda accumulated_results, p_results: [ *accumulated_results, *( # Cast to list if not already a list p_results if isinstance(p_results, list) or type(p_results) == tuple else [p_results] ), ], parsed_results, [], ) # Step 5C. Parse the commits to determine the bump level that should be applied parsed_levels: set[LevelBump] = { parsed_result.bump # type: ignore[union-attr] # too complex for type checkers for parsed_result in filter( # Filter out any non-ParsedCommit results (i.e. 
ParseErrors) lambda parsed_result: isinstance(parsed_result, ParsedCommit), consolidated_results, ) } logger.debug( "parsed the following distinct levels from the commits since the last release: %s", parsed_levels, ) level_bump = max(parsed_levels, default=LevelBump.NO_RELEASE) logger.info("The type of the next release release is: %s", level_bump) if all( [ level_bump is LevelBump.NO_RELEASE, latest_version.major != 0 or allow_zero_version, ] ): logger.info("No release will be made") return latest_version return _increment_version( latest_version=latest_version, latest_full_version=latest_full_release_version, level_bump=level_bump, prerelease=prerelease, prerelease_token=translator.prerelease_token, major_on_zero=major_on_zero, allow_zero_version=allow_zero_version, ) python-semantic-release-9.21.0/src/semantic_release/version/declaration.py000066400000000000000000000070661475670435200270070ustar00rootroot00000000000000from __future__ import annotations # TODO: Remove v10 from abc import ABC, abstractmethod from logging import getLogger from pathlib import Path from typing import TYPE_CHECKING from deprecated.sphinx import deprecated from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.declarations.pattern import PatternVersionDeclaration from semantic_release.version.declarations.toml import TomlVersionDeclaration if TYPE_CHECKING: # pragma: no cover from semantic_release.version.version import Version # Globals __all__ = [ "IVersionReplacer", "VersionStampType", "PatternVersionDeclaration", "TomlVersionDeclaration", "VersionDeclarationABC", ] log = getLogger(__name__) @deprecated( version="9.20.0", reason=str.join( " ", [ "Refactored to composition paradigm using the new IVersionReplacer interface.", "This class will be removed in a future release", ], ), ) class VersionDeclarationABC(ABC): """ ABC for classes representing a 
location in which a version is declared somewhere within the source tree of the repository """ def __init__(self, path: Path | str, search_text: str) -> None: self.path = Path(path) if not self.path.exists(): raise FileNotFoundError(f"path {self.path.resolve()!r} does not exist") self.search_text = search_text self._content: str | None = None @property def content(self) -> str: """ The content of the source file in which the version is stored. This property is cached in the instance variable _content """ if self._content is None: log.debug( "No content stored, reading from source file %s", self.path.resolve() ) self._content = self.path.read_text() return self._content @content.deleter def content(self) -> None: log.debug("resetting instance-stored source file contents") self._content = None @abstractmethod def parse(self) -> set[Version]: """ Return a set of the versions which can be parsed from the file. Because a source can match in multiple places, this method returns a set of matches. Generally, there should only be one element in this set (i.e. even if the version is specified in multiple places, it should be the same version in each place), but enforcing that condition is not mandatory or expected. """ @abstractmethod def replace(self, new_version: Version) -> str: """ Update the versions. This method reads the underlying file, replaces each occurrence of the matched pattern, then writes the updated file. :param new_version: The new version number as a `Version` instance """ def write(self, content: str) -> None: r""" Write new content back to the source path. Use alongside .replace(): >>> class MyVD(VersionDeclarationABC): ... def parse(self): ... ... def replace(self, new_version: Version): ... ... def write(self, content: str): ... 
>>> new_version = Version.parse("1.2.3") >>> vd = MyVD("path", r"__version__ = (?P\d+\d+\d+)") >>> vd.write(vd.replace(new_version)) """ log.debug("writing content to %r", self.path.resolve()) self.path.write_text(content) self._content = None python-semantic-release-9.21.0/src/semantic_release/version/declarations/000077500000000000000000000000001475670435200266075ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/version/declarations/__init__.py000066400000000000000000000000001475670435200307060ustar00rootroot00000000000000python-semantic-release-9.21.0/src/semantic_release/version/declarations/enum.py000066400000000000000000000003641475670435200301300ustar00rootroot00000000000000from __future__ import annotations from enum import Enum class VersionStampType(str, Enum): """Enum for the type of version declaration""" # The version is a number format, e.g. 1.2.3 NUMBER_FORMAT = "nf" TAG_FORMAT = "tf" python-semantic-release-9.21.0/src/semantic_release/version/declarations/i_version_replacer.py000066400000000000000000000045601475670435200330400ustar00rootroot00000000000000from __future__ import annotations from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING if TYPE_CHECKING: # pragma: no cover from pathlib import Path from semantic_release.version.version import Version class IVersionReplacer(metaclass=ABCMeta): """ Interface for subclasses that replace a version string in a source file. Methods generally have a base implementation are implemented here but likely just provide a not-supported message but return gracefully This class cannot be instantiated directly but must be inherited from and implement the designated abstract methods. """ @classmethod def __subclasshook__(cls, subclass: type) -> bool: # Validate that the subclass implements all of the abstract methods. # This supports isinstance and issubclass checks. 
return bool( cls is IVersionReplacer and all( bool(hasattr(subclass, method) and callable(getattr(subclass, method))) for method in IVersionReplacer.__abstractmethods__ ) ) @abstractmethod def parse(self) -> set[Version]: """ Return a set of the versions which can be parsed from the file. Because a source can match in multiple places, this method returns a set of matches. Generally, there should only be one element in this set (i.e. even if the version is specified in multiple places, it should be the same version in each place), but enforcing that condition is not mandatory or expected. """ raise NotImplementedError # pragma: no cover @abstractmethod def replace(self, new_version: Version) -> str: """ Replace the version in the source content with `new_version`, and return the updated content. :param new_version: The new version number as a `Version` instance """ raise NotImplementedError # pragma: no cover @abstractmethod def update_file_w_version( self, new_version: Version, noop: bool = False ) -> Path | None: """ This method reads the underlying file, replaces each occurrence of the matched pattern, then writes the updated file. 
:param new_version: The new version number as a `Version` instance """ raise NotImplementedError # pragma: no cover python-semantic-release-9.21.0/src/semantic_release/version/declarations/pattern.py000066400000000000000000000201471475670435200306420ustar00rootroot00000000000000from __future__ import annotations from logging import getLogger from pathlib import Path from re import ( MULTILINE, compile as regexp, error as RegExpError, # noqa: N812 escape as regex_escape, ) from typing import TYPE_CHECKING from deprecated.sphinx import deprecated from semantic_release.cli.util import noop_report from semantic_release.const import SEMVER_REGEX from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.version import Version if TYPE_CHECKING: # pragma: no cover from re import Match log = getLogger(__name__) class VersionSwapper: """Callable to replace a version number in a string with a new version number.""" def __init__(self, new_version_str: str, group_match_name: str) -> None: self.version_str = new_version_str self.group_match_name = group_match_name def __call__(self, match: Match[str]) -> str: i, j = match.span() ii, jj = match.span(self.group_match_name) return f"{match.string[i:ii]}{self.version_str}{match.string[jj:j]}" class PatternVersionDeclaration(IVersionReplacer): """ VersionDeclarationABC implementation representing a version number in a particular file. The version number is identified by a regular expression, which should be provided in `search_text`. 
""" _VERSION_GROUP_NAME = "version" def __init__( self, path: Path | str, search_text: str, stamp_format: VersionStampType ) -> None: self._content: str | None = None self._path = Path(path).resolve() self._stamp_format = stamp_format try: self._search_pattern = regexp(search_text, flags=MULTILINE) except RegExpError as err: raise ValueError( f"Invalid regular expression for search text: {search_text!r}" ) from err if self._VERSION_GROUP_NAME not in self._search_pattern.groupindex: raise ValueError( str.join( " ", [ f"Invalid search text {search_text!r}; must use", f"'{self._VERSION_GROUP_NAME}' as a named group, for example", f"(?P<{self._VERSION_GROUP_NAME}>...) . For more info on named", "groups see https://docs.python.org/3/library/re.html", ], ) ) @property def content(self) -> str: """A cached property that stores the content of the configured source file.""" if self._content is None: log.debug("No content stored, reading from source file %s", self._path) if not self._path.exists(): raise FileNotFoundError(f"path {self._path!r} does not exist") self._content = self._path.read_text() return self._content @content.deleter def content(self) -> None: self._content = None @deprecated( version="9.20.0", reason="Function is unused and will be removed in a future release", ) def parse(self) -> set[Version]: # pragma: no cover """ Return the versions matching this pattern. Because a pattern can match in multiple places, this method returns a set of matches. Generally, there should only be one element in this set (i.e. even if the version is specified in multiple places, it should be the same version in each place), but it falls on the caller to check for this condition. 
""" versions = { Version.parse(m.group(self._VERSION_GROUP_NAME)) for m in self._search_pattern.finditer(self.content) } log.debug( "Parsing current version: path=%r pattern=%r num_matches=%s", self._path.resolve(), self._search_pattern, len(versions), ) return versions def replace(self, new_version: Version) -> str: """ Replace the version in the source content with `new_version`, and return the updated content. :param new_version: The new version number as a `Version` instance """ new_content, n_matches = self._search_pattern.subn( VersionSwapper( new_version_str=( new_version.as_tag() if self._stamp_format == VersionStampType.TAG_FORMAT else str(new_version) ), group_match_name=self._VERSION_GROUP_NAME, ), self.content, ) log.debug( "path=%r pattern=%r num_matches=%r", self._path, self._search_pattern, n_matches, ) return new_content def update_file_w_version( self, new_version: Version, noop: bool = False ) -> Path | None: if noop: if not self._path.exists(): noop_report( f"FILE NOT FOUND: cannot stamp version in non-existent file {self._path}", ) return None if len(self._search_pattern.findall(self.content)) < 1: noop_report( f"VERSION PATTERN NOT FOUND: no version to stamp in file {self._path}", ) return None return self._path new_content = self.replace(new_version) if new_content == self.content: return None self._path.write_text(new_content) del self.content return self._path @classmethod def from_string_definition( cls, replacement_def: str, tag_format: str ) -> PatternVersionDeclaration: """ create an instance of self from a string representing one item of the "version_variables" list in the configuration """ parts = replacement_def.split(":", maxsplit=2) if len(parts) <= 1: raise ValueError( f"Invalid replacement definition {replacement_def!r}, missing ':'" ) if len(parts) == 2: # apply default version_type of "number_format" (ie. 
"1.2.3") parts = [*parts, VersionStampType.NUMBER_FORMAT.value] path, variable, version_type = parts try: stamp_type = VersionStampType(version_type) except ValueError as err: raise ValueError( str.join( " ", [ "Invalid stamp type, must be one of:", str.join(", ", [e.value for e in VersionStampType]), ], ) ) from err # DEFAULT: naked (no v-prefixed) semver version value_replace_pattern_str = ( f"(?P<{cls._VERSION_GROUP_NAME}>{SEMVER_REGEX.pattern})" ) if version_type == VersionStampType.TAG_FORMAT.value: tag_parts = tag_format.strip().split(r"{version}", maxsplit=1) value_replace_pattern_str = str.join( "", [ f"(?P<{cls._VERSION_GROUP_NAME}>", regex_escape(tag_parts[0]), SEMVER_REGEX.pattern, (regex_escape(tag_parts[1]) if len(tag_parts) > 1 else ""), ")", ], ) search_text = str.join( "", [ # Supports optional matching quotations around variable name # Negative lookbehind to ensure we don't match part of a variable name f"""(?x)(?P['"])?(?['"])?{value_replace_pattern_str}(?P=quote2)?""", ], ) return cls(path, search_text, stamp_type) python-semantic-release-9.21.0/src/semantic_release/version/declarations/toml.py000066400000000000000000000115211475670435200301340ustar00rootroot00000000000000from __future__ import annotations from logging import getLogger from pathlib import Path from typing import Any, Dict, cast import tomlkit from deprecated.sphinx import deprecated from dotty_dict import Dotty from semantic_release.cli.util import noop_report from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.version import Version # globals log = getLogger(__name__) class TomlVersionDeclaration(IVersionReplacer): def __init__( self, path: Path | str, search_text: str, stamp_format: VersionStampType ) -> None: self._content: str | None = None self._path = Path(path).resolve() self._stamp_format = stamp_format self._search_text = search_text 
@property def content(self) -> str: """A cached property that stores the content of the configured source file.""" if self._content is None: log.debug("No content stored, reading from source file %s", self._path) if not self._path.exists(): raise FileNotFoundError(f"path {self._path!r} does not exist") self._content = self._path.read_text() return self._content @content.deleter def content(self) -> None: self._content = None @deprecated( version="9.20.0", reason="Function is unused and will be removed in a future release", ) def parse(self) -> set[Version]: # pragma: no cover """Look for the version in the source content""" content = self._load() maybe_version: str = content.get(self._search_text) # type: ignore[return-value] if maybe_version is not None: log.debug( "Found a key %r that looks like a version (%r)", self._search_text, maybe_version, ) valid_version = Version.parse(maybe_version) return {valid_version} if valid_version else set() # Maybe in future raise error if not found? return set() def replace(self, new_version: Version) -> str: """ Replace the version in the source content with `new_version`, and return the updated content. 
""" content = self._load() if self._search_text in content: log.info( "found %r in source file contents, replacing with %s", self._search_text, new_version, ) content[self._search_text] = ( new_version.as_tag() if self._stamp_format == VersionStampType.TAG_FORMAT else str(new_version) ) return tomlkit.dumps(cast(Dict[str, Any], content)) def _load(self) -> Dotty: """Load the content of the source file into a Dotty for easier searching""" return Dotty(tomlkit.loads(self.content)) def update_file_w_version( self, new_version: Version, noop: bool = False ) -> Path | None: if noop: if not self._path.exists(): noop_report( f"FILE NOT FOUND: cannot stamp version in non-existent file {self._path!r}", ) return None if self._search_text not in self._load(): noop_report( f"VERSION PATTERN NOT FOUND: no version to stamp in file {self._path!r}", ) return None return self._path new_content = self.replace(new_version) if new_content == self.content: return None self._path.write_text(new_content) del self.content return self._path @classmethod def from_string_definition(cls, replacement_def: str) -> TomlVersionDeclaration: """ create an instance of self from a string representing one item of the "version_toml" list in the configuration """ parts = replacement_def.split(":", maxsplit=2) if len(parts) <= 1: raise ValueError( f"Invalid TOML replacement definition {replacement_def!r}, missing ':'" ) if len(parts) == 2: # apply default version_type of "number_format" (ie. 
"1.2.3") parts = [*parts, VersionStampType.NUMBER_FORMAT.value] path, search_text, version_type = parts try: stamp_type = VersionStampType(version_type) except ValueError as err: raise ValueError( str.join( " ", [ "Invalid stamp type, must be one of:", str.join(", ", [e.value for e in VersionStampType]), ], ) ) from err return cls(path, search_text, stamp_type) python-semantic-release-9.21.0/src/semantic_release/version/translator.py000066400000000000000000000057521475670435200267130ustar00rootroot00000000000000from __future__ import annotations import logging import re from semantic_release.const import SEMVER_REGEX from semantic_release.helpers import check_tag_format from semantic_release.version.version import Version log = logging.getLogger(__name__) class VersionTranslator: """ Class to handle translation from Git tags into their corresponding Version instances. """ _VERSION_REGEX = SEMVER_REGEX @classmethod def _invert_tag_format_to_re(cls, tag_format: str) -> re.Pattern[str]: r""" Unpick the "tag_format" format string and create a regex which can be used to convert a tag to a version string. 
The following relationship should always hold true: >>> version = "1.2.3-anything.1+at_all.1234" # doesn't matter >>> tag_format = "v-anything_{version}_at-all" # doesn't matter >>> inverted_format = VersionTranslator._invert_tag_format_to_re(tag_format) >>> tag = tag_format.format(version=version) >>> m = inverted_format.match(tag) >>> assert m is not None >>> assert m.expand(r"\g") == version """ pat = re.compile( tag_format.replace(r"{version}", r"(?P.*)"), flags=re.VERBOSE, ) log.debug("inverted tag_format %r to %r", tag_format, pat.pattern) return pat def __init__( self, tag_format: str = "v{version}", prerelease_token: str = "rc", # noqa: S107 ) -> None: check_tag_format(tag_format) self.tag_format = tag_format self.prerelease_token = prerelease_token self.from_tag_re = self._invert_tag_format_to_re(self.tag_format) def from_string(self, version_str: str) -> Version: """ Return a Version instance from a string. Delegates directly to Version.parse, using the translator's own stored values for tag_format and prerelease """ return Version.parse( version_str, tag_format=self.tag_format, prerelease_token=self.prerelease_token, ) def from_tag(self, tag: str) -> Version | None: """ Return a Version instance from a Git tag, if tag_format matches the format which would have generated the tag from a version. Otherwise return None. For example, a tag of 'v1.2.3' should be matched if `tag_format = 'v{version}`, but not if `tag_format = staging--v{version}`. 
""" tag_match = self.from_tag_re.match(tag) if not tag_match: return None raw_version_str = tag_match.group("version") return self.from_string(raw_version_str) def str_to_tag(self, version_str: str) -> str: """Formats a version string into a tag name""" return self.tag_format.format(version=version_str) def __repr__(self) -> str: return ( f"{type(self).__qualname__}(tag_format={self.tag_format}, " f"prerelease_token={self.prerelease_token})" ) python-semantic-release-9.21.0/src/semantic_release/version/version.py000066400000000000000000000335371475670435200262110ustar00rootroot00000000000000from __future__ import annotations import logging import re from functools import wraps from itertools import zip_longest from typing import Callable, Union, overload from semantic_release.const import SEMVER_REGEX from semantic_release.enums import LevelBump from semantic_release.errors import InvalidVersion from semantic_release.helpers import check_tag_format log = logging.getLogger(__name__) # Very heavily inspired by semver.version:_comparator, I don't think there's # a cleaner way to do this # https://github.com/python-semver/python-semver/blob/b5317af9a7e99e6a86df98320e73be72d5adf0de/src/semver/version.py#L32 VersionComparable = Union["Version", str] VersionComparator = Callable[["Version", "Version"], bool] @overload def _comparator( *, type_guard: bool, ) -> Callable[[VersionComparator], VersionComparator]: ... @overload def _comparator( method: VersionComparator, *, type_guard: bool = True ) -> VersionComparator: ... def _comparator( method: VersionComparator | None = None, *, type_guard: bool = True ) -> VersionComparator | Callable[[VersionComparator], VersionComparator]: """ wrap a `Version` binop method to guard types and try to parse strings into Versions. use `type_guard = False` for `__eq__` and `__neq__` to make them return False if the wrong type is used, instead of erroring. 
""" if method is None: return lambda method: _comparator(method, type_guard=type_guard) @wraps(method) def _wrapper(self: Version, other: VersionComparable) -> bool: if not isinstance(other, (str, Version)): return False if not type_guard else NotImplemented if isinstance(other, str): try: other_v = self.parse( other, tag_format=self.tag_format, prerelease_token=self.prerelease_token, ) except InvalidVersion as ex: raise TypeError(str(ex)) from ex else: other_v = other return method(self, other_v) # type: ignore[misc] return _wrapper class Version: _VERSION_REGEX = SEMVER_REGEX def __init__( self, major: int, minor: int, patch: int, *, prerelease_token: str = "rc", # noqa: S107 prerelease_revision: int | None = None, build_metadata: str = "", tag_format: str = "v{version}", ) -> None: self.major = major self.minor = minor self.patch = patch self.prerelease_token = prerelease_token self.prerelease_revision = prerelease_revision self.build_metadata = build_metadata self._tag_format = tag_format @property def tag_format(self) -> str: return self._tag_format @tag_format.setter def tag_format(self, new_format: str) -> None: check_tag_format(new_format) self._tag_format = new_format # Maybe cache? @classmethod def parse( cls, version_str: str, tag_format: str = "v{version}", prerelease_token: str = "rc", # noqa: S107 ) -> Version: """ Parse version string to a Version instance. Inspired by `semver.version:VersionInfo.parse`, this implementation doesn't allow optional minor and patch versions. :param prerelease_token: will be ignored if the version string is a prerelease, the parsed token from `version_str` will be used instead. 
""" if not isinstance(version_str, str): raise InvalidVersion(f"{version_str!r} cannot be parsed as a Version") log.debug("attempting to parse string %r as Version", version_str) match = cls._VERSION_REGEX.fullmatch(version_str) if not match: raise InvalidVersion(f"{version_str!r} is not a valid Version") prerelease = match.group("prerelease") if prerelease: pm = re.match(r"(?P[a-zA-Z0-9-\.]+)\.(?P\d+)", prerelease) if not pm: raise NotImplementedError( f"{cls.__qualname__} currently supports only prereleases " r"of the format (-([a-zA-Z0-9-])\.\(\d+)), for example " r"'1.2.3-my-custom-3rc.4'." ) prerelease_token, prerelease_revision = pm.groups() log.debug( "parsed prerelease_token %s, prerelease_revision %s from version " "string %s", prerelease_token, prerelease_revision, version_str, ) else: prerelease_revision = None log.debug("version string %s parsed as a non-prerelease", version_str) build_metadata = match.group("buildmetadata") or "" log.debug( "parsed build metadata %r from version string %s", build_metadata, version_str, ) return Version( int(match.group("major")), int(match.group("minor")), int(match.group("patch")), prerelease_token=prerelease_token, prerelease_revision=( int(prerelease_revision) if prerelease_revision else None ), build_metadata=build_metadata, tag_format=tag_format, ) @property def is_prerelease(self) -> bool: return self.prerelease_revision is not None def __str__(self) -> str: full = f"{self.major}.{self.minor}.{self.patch}" prerelease = ( f"-{self.prerelease_token}.{self.prerelease_revision}" if self.prerelease_revision else "" ) build_metadata = f"+{self.build_metadata}" if self.build_metadata else "" return f"{full}{prerelease}{build_metadata}" def __repr__(self) -> str: prerelease_token_repr = ( repr(self.prerelease_token) if self.prerelease_token is not None else None ) prerelease_revision_repr = ( repr(self.prerelease_revision) if self.prerelease_revision is not None else None ) build_metadata_repr = ( 
repr(self.build_metadata) if self.build_metadata is not None else None ) return ( f"{type(self).__qualname__}(" + ", ".join( ( f"major={self.major}", f"minor={self.minor}", f"patch={self.patch}", f"prerelease_token={prerelease_token_repr}", f"prerelease_revision={prerelease_revision_repr}", f"build_metadata={build_metadata_repr}", f"tag_format={self.tag_format!r}", ) ) + ")" ) def as_tag(self) -> str: return self.tag_format.format(version=str(self)) def as_semver_tag(self) -> str: return f"v{self!s}" def bump(self, level: LevelBump) -> Version: """ Return a new Version instance according to the level specified to bump. Note this will intentionally drop the build metadata - that should be added elsewhere for the specific build producing this version. """ if type(level) != LevelBump: raise TypeError(f"Unexpected level {level!r}: expected {LevelBump!r}") log.debug("performing a %s level bump", level) if level is LevelBump.MAJOR: return Version( self.major + 1, 0, 0, prerelease_token=self.prerelease_token, prerelease_revision=1 if self.is_prerelease else None, tag_format=self.tag_format, ) if level is LevelBump.MINOR: return Version( self.major, self.minor + 1, 0, prerelease_token=self.prerelease_token, prerelease_revision=1 if self.is_prerelease else None, tag_format=self.tag_format, ) if level is LevelBump.PATCH: return Version( self.major, self.minor, self.patch + 1, prerelease_token=self.prerelease_token, prerelease_revision=1 if self.is_prerelease else None, tag_format=self.tag_format, ) if level is LevelBump.PRERELEASE_REVISION: return Version( self.major, self.minor, self.patch, prerelease_token=self.prerelease_token, prerelease_revision=1 if not self.is_prerelease else (self.prerelease_revision or 0) + 1, tag_format=self.tag_format, ) # for consistency, this creates a new instance regardless # only other option is level is LevelBump.NO_RELEASE return Version( self.major, self.minor, self.patch, prerelease_token=self.prerelease_token, 
prerelease_revision=self.prerelease_revision, tag_format=self.tag_format, ) # Enables Version + LevelBump. __add__ = bump def __hash__(self) -> int: # If we use str(self) we don't capture tag_format, so another # instance with a tag_format "special_{version}_format" would # collide with an instance using "v{version}"/other format return hash(self.__repr__()) @_comparator(type_guard=False) def __eq__(self, other: Version) -> bool: # type: ignore[override] # https://semver.org/#spec-item-11 - # build metadata is not used for comparison return all( getattr(self, attr) == getattr(other, attr) for attr in ( "major", "minor", "patch", "prerelease_token", "prerelease_revision", ) ) @_comparator(type_guard=False) def __neq__(self, other: Version) -> bool: return not self.__eq__(other) # mypy wants to compare signature types with __lt__, # but can't because of the decorator @_comparator def __gt__(self, other: Version) -> bool: # type: ignore[has-type] # https://semver.org/#spec-item-11 - # build metadata is not used for comparison # Note we only support the following versioning currently, which # is a subset of the full spec: # (\d+\.\d+\.\d+)(-\w+\.\d+)?(\+.*)? if self.major != other.major: return self.major > other.major if self.minor != other.minor: return self.minor > other.minor if self.patch != other.patch: return self.patch > other.patch # If just one is a prerelease, then self > other if other is the prerelease # If neither are prereleases then they're equal (so return False) if not (self.is_prerelease and other.is_prerelease): return other.is_prerelease # If both are prereleases... # According to the semver spec 11.4 there are many other rules for # comparing precedence of pre-release versions. 
Here we just compare # the prerelease tokens, and their revision numbers if self.prerelease_token != other.prerelease_token: for self_tk, other_tk in zip_longest( self.prerelease_token.split("."), other.prerelease_token.split("."), fillvalue=None, ): if self_tk == other_tk: continue if (self_tk is None) ^ (other_tk is None): # Longest token (i.e. non-None) is greater return other_tk is None # Lexical sort, e.g. "rc" > "beta" > "alpha" # we have eliminated that one or both might be None above, # but mypy doesn't recognise this return self_tk > other_tk # type: ignore[operator] # We have eliminated that one or both aren't prereleases by the above return self.prerelease_revision > other.prerelease_revision # type: ignore[operator] # noqa: E501 # mypy wants to compare signature types with __le__, # but can't because of the decorator @_comparator def __ge__(self, other: Version) -> bool: # type: ignore[has-type] return self.__gt__(other) or self.__eq__(other) @_comparator def __lt__(self, other: Version) -> bool: return not (self.__gt__(other) or self.__eq__(other)) @_comparator def __le__(self, other: Version) -> bool: return not self.__gt__(other) def __sub__(self, other: Version) -> LevelBump: if not isinstance(other, Version): return NotImplemented if self.major != other.major: return LevelBump.MAJOR if self.minor != other.minor: return LevelBump.MINOR if self.patch != other.patch: return LevelBump.PATCH if self.is_prerelease ^ other.is_prerelease: return max( self.finalize_version() - other.finalize_version(), LevelBump.PRERELEASE_REVISION, ) if self.prerelease_revision != other.prerelease_revision: return LevelBump.PRERELEASE_REVISION return LevelBump.NO_RELEASE def to_prerelease( self, token: str | None = None, revision: int | None = None ) -> Version: return Version( self.major, self.minor, self.patch, prerelease_token=token or self.prerelease_token, prerelease_revision=(revision or self.prerelease_revision) or 1, tag_format=self.tag_format, ) def 
finalize_version(self) -> Version: return Version( self.major, self.minor, self.patch, prerelease_token=self.prerelease_token, tag_format=self.tag_format, ) python-semantic-release-9.21.0/tests/000077500000000000000000000000001475670435200175225ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/__init__.py000066400000000000000000000000001475670435200216210ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/conftest.py000066400000000000000000000414601475670435200217260ustar00rootroot00000000000000"""Note: fixtures are stored in the tests/fixtures directory for better organisation""" from __future__ import annotations import json import os import sys from datetime import datetime, timedelta, timezone from hashlib import md5 from pathlib import Path from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING import pytest from click.testing import CliRunner from filelock import FileLock from git import Commit, Repo from tests.const import PROJ_DIR from tests.fixtures import * from tests.util import copy_dir_tree, remove_dir_tree if TYPE_CHECKING: from tempfile import _TemporaryFileWrapper from typing import Any, Callable, Generator, Protocol, Sequence, TypedDict from filelock import AcquireReturnProxy from git import Actor from tests.fixtures.git_repo import RepoActions class MakeCommitObjFn(Protocol): def __call__(self, message: str) -> Commit: ... class NetrcFileFn(Protocol): def __call__(self, machine: str) -> _TemporaryFileWrapper[str]: ... class TeardownCachedDirFn(Protocol): def __call__(self, directory: Path) -> Path: ... class FormatDateStrFn(Protocol): def __call__(self, date: datetime) -> str: ... class GetStableDateNowFn(Protocol): def __call__(self) -> datetime: ... class GetMd5ForFileFn(Protocol): def __call__(self, file_path: Path | str) -> str: ... 
class GetMd5ForSetOfFilesFn(Protocol): """ Generates a hash for a set of files based on their contents This function will automatically filter out any 0-byte files or `__init__.py` files :param: files: A list of file paths to generate a hash for (MUST BE absolute paths) """ def __call__(self, files: Sequence[Path | str]) -> str: ... class GetAuthorizationToBuildRepoCacheFn(Protocol): def __call__(self, repo_name: str) -> AcquireReturnProxy | None: ... class BuildRepoOrCopyCacheFn(Protocol): def __call__( self, repo_name: str, build_spec_hash: str, build_repo_func: Callable[[Path], Sequence[RepoActions]], dest_dir: Path | None = None, ) -> Path: ... class RepoData(TypedDict): build_date: str build_spec_hash: str build_definition: Sequence[RepoActions] class GetCachedRepoDataFn(Protocol): def __call__(self, proj_dirname: str) -> RepoData | None: ... class SetCachedRepoDataFn(Protocol): def __call__(self, proj_dirname: str, data: RepoData) -> None: ... def pytest_addoption(parser: pytest.Parser, pluginmanager: pytest.PytestPluginManager): parser.addoption( "--comprehensive", help="Run full test suite including slow tests", default=False, action="store_true", ) def pytest_configure(config: pytest.Config): """ If no test selection modifications are provided, default to running only unit tests. See `pytest_collection_modifyitems` for more information on test selection modifications. 
""" user_desired_comprehensive_evaluation = config.getoption("--comprehensive") user_provided_filter = str(config.getoption("-k")) user_provided_markers = str(config.getoption("-m")) root_test_dir = Path(__file__).parent.relative_to(config.rootpath) user_provided_test_path = bool(config.args != [str(root_test_dir)]) # If no options are provided, default to running only unit tests if not any( ( user_desired_comprehensive_evaluation, user_provided_test_path, user_provided_filter, user_provided_markers, ) ): config.option.markexpr = pytest.mark.unit.name @pytest.hookimpl(trylast=True) def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]): """ Test selection modifier based on markers and command line options. Examples -------- pytest only unit tests that are not marked comprehensive are executed pytest --comprehensive all tests are executed pytest -m unit only unit tests that are not marked comprehensive are executed (same as no options) pytest -m e2e only end-to-end tests that are not marked comprehensive are executed pytest -m e2e --comprehensive all end-to-end tests are executed pytest -m "not unit" only tests that are not marked unit or comprehensive are executed pytest -m "not unit" --comprehensive all tests that are not marked unit are executed pytest -k "test_name" only tests that match the substring "test_name" (but not marked comprehensive) are executed pytest -k "test_name" --comprehensive all tests that match the substring "test_name" are executed """ disable_comprehensive_tests = not config.getoption("--comprehensive") comprehensive_test_skip_marker = pytest.mark.skip( reason="comprehensive tests are disabled by default" ) user_provided_filter = str(config.getoption("-k")) if any((disable_comprehensive_tests,)): for item in items: if user_provided_filter and user_provided_filter in item.name: continue if disable_comprehensive_tests and "comprehensive" in item.keywords: item.add_marker(comprehensive_test_skip_marker) 
@pytest.fixture def cli_runner() -> CliRunner: return CliRunner(mix_stderr=False) @pytest.fixture(scope="session") def default_netrc_username() -> str: return "username" @pytest.fixture(scope="session") def default_netrc_password() -> str: return "password" @pytest.fixture(scope="session") def netrc_file( default_netrc_username: str, default_netrc_password: str, ) -> Generator[NetrcFileFn, None, None]: temporary_files: list[str] = [] def _netrc_file(machine: str) -> _TemporaryFileWrapper[str]: ctx_mgr = NamedTemporaryFile("w", delete=False) with ctx_mgr as netrc_fd: temporary_files.append(ctx_mgr.name) netrc_fd.write(f"machine {machine}{os.linesep}") netrc_fd.write(f"login {default_netrc_username}{os.linesep}") netrc_fd.write(f"password {default_netrc_password}{os.linesep}") netrc_fd.flush() return ctx_mgr try: yield _netrc_file finally: for temp_file in temporary_files: os.unlink(temp_file) @pytest.fixture(scope="session") def stable_today_date() -> datetime: curr_time = datetime.now(timezone.utc).astimezone() est_test_completion = curr_time + timedelta(hours=1) # exaggeration starting_day_of_year = curr_time.timetuple().tm_yday ending_day_of_year = est_test_completion.timetuple().tm_yday if starting_day_of_year < ending_day_of_year: return est_test_completion return curr_time @pytest.fixture(scope="session") def stable_now_date(stable_today_date: datetime) -> GetStableDateNowFn: def _stable_now_date() -> datetime: curr_time = datetime.now(timezone.utc).astimezone() return stable_today_date.replace( minute=curr_time.minute, second=curr_time.second, microsecond=curr_time.microsecond, ) return _stable_now_date @pytest.fixture(scope="session") def format_date_str() -> FormatDateStrFn: """Formats a date as how it would appear in the changelog (Must match local timezone)""" def _format_date_str(date: datetime) -> str: return date.strftime("%Y-%m-%d") return _format_date_str @pytest.fixture(scope="session") def today_date_str( stable_today_date: datetime, 
format_date_str: FormatDateStrFn ) -> str: """Today's Date formatted as how it would appear in the changelog (matches local timezone)""" return format_date_str(stable_today_date) @pytest.fixture(scope="session") def cached_files_dir(request: pytest.FixtureRequest) -> Path: return request.config.cache.mkdir("psr-cached-repos") @pytest.fixture(scope="session") def get_authorization_to_build_repo_cache( tmp_path_factory: pytest.TempPathFactory, worker_id: str ) -> GetAuthorizationToBuildRepoCacheFn: def _get_authorization_to_build_repo_cache( repo_name: str, ) -> AcquireReturnProxy | None: if worker_id == "master": # not executing with multiple workers via xdist, so just continue return None # get the temp directory shared by all workers root_tmp_dir = tmp_path_factory.getbasetemp().parent return FileLock(root_tmp_dir / f"{repo_name}.lock").acquire( timeout=30, blocking=True ) return _get_authorization_to_build_repo_cache @pytest.fixture(scope="session") def get_cached_repo_data(request: pytest.FixtureRequest) -> GetCachedRepoDataFn: def _get_cached_repo_data(proj_dirname: str) -> RepoData | None: cache_key = f"psr/repos/{proj_dirname}" return request.config.cache.get(cache_key, None) return _get_cached_repo_data @pytest.fixture(scope="session") def set_cached_repo_data(request: pytest.FixtureRequest) -> SetCachedRepoDataFn: def magic_serializer(obj: Any) -> Any: if isinstance(obj, Path): return obj.__fspath__() return obj def _set_cached_repo_data(proj_dirname: str, data: RepoData) -> None: cache_key = f"psr/repos/{proj_dirname}" request.config.cache.set( cache_key, json.loads(json.dumps(data, default=magic_serializer)), ) return _set_cached_repo_data @pytest.fixture(scope="session") def build_repo_or_copy_cache( cached_files_dir: Path, today_date_str: str, stable_now_date: GetStableDateNowFn, get_cached_repo_data: GetCachedRepoDataFn, set_cached_repo_data: SetCachedRepoDataFn, get_authorization_to_build_repo_cache: GetAuthorizationToBuildRepoCacheFn, ) -> 
BuildRepoOrCopyCacheFn: log_file = cached_files_dir.joinpath("repo-build.log") log_file_lock = FileLock(log_file.with_suffix(f"{log_file.suffix}.lock"), timeout=2) def _build_repo_w_cache_checking( repo_name: str, build_spec_hash: str, build_repo_func: Callable[[Path], Sequence[RepoActions]], dest_dir: Path | None = None, ) -> Path: # Blocking mechanism to synchronize xdist workers # Runs before the cache is checked because the cache will be set once the build is complete filelock = get_authorization_to_build_repo_cache(repo_name) cached_repo_data = get_cached_repo_data(repo_name) cached_repo_path = cached_files_dir.joinpath(repo_name) # Determine if the build spec has changed since the last cached build unmodified_build_spec = bool( cached_repo_data and cached_repo_data["build_spec_hash"] == build_spec_hash ) if not unmodified_build_spec or not cached_repo_path.exists(): # Cache miss, so build the repo (make sure its clean first) remove_dir_tree(cached_repo_path, force=True) cached_repo_path.mkdir(parents=True, exist_ok=True) build_msg = f"Building cached project files for {repo_name}" with log_file_lock, log_file.open(mode="a") as afd: afd.write(f"{stable_now_date().isoformat()}: {build_msg}...\n") # Marks the date when the cached repo was created set_cached_repo_data( repo_name, { "build_date": today_date_str, "build_spec_hash": build_spec_hash, "build_definition": build_repo_func(cached_repo_path), }, ) with log_file_lock, log_file.open(mode="a") as afd: afd.write(f"{stable_now_date().isoformat()}: {build_msg}...DONE\n") if filelock: filelock.lock.release() if dest_dir: copy_dir_tree(cached_repo_path, dest_dir) return dest_dir return cached_repo_path return _build_repo_w_cache_checking @pytest.fixture(scope="session") def teardown_cached_dir() -> Generator[TeardownCachedDirFn, None, None]: directories: list[Path] = [] def _teardown_cached_dir(directory: Path | str) -> Path: directories.append(Path(directory)) return directories[-1] try: yield 
_teardown_cached_dir finally: # clean up any registered cached directories for directory in directories: if directory.exists(): remove_dir_tree(directory, force=True) @pytest.fixture(scope="session") def make_commit_obj( commit_author: Actor, stable_now_date: GetStableDateNowFn ) -> MakeCommitObjFn: def _make_commit(message: str) -> Commit: commit_timestamp = round(stable_now_date().timestamp()) return Commit( repo=Repo(), binsha=Commit.NULL_BIN_SHA, message=message, author=commit_author, authored_date=commit_timestamp, committer=commit_author, committed_date=commit_timestamp, ) return _make_commit @pytest.fixture(scope="session") def get_md5_for_file() -> GetMd5ForFileFn: in_memory_cache = {} def _get_md5_for_file(file_path: Path | str) -> str: file_path = Path(file_path) rel_file_path = str(file_path.relative_to(PROJ_DIR)) if rel_file_path not in in_memory_cache: in_memory_cache[rel_file_path] = md5( # noqa: S324, not using hash for security file_path.read_bytes() ).hexdigest() return in_memory_cache[rel_file_path] return _get_md5_for_file @pytest.fixture(scope="session") def get_md5_for_set_of_files( get_md5_for_file: GetMd5ForFileFn, ) -> GetMd5ForSetOfFilesFn: in_memory_cache = {} def _get_md5_for_set_of_files(files: Sequence[Path | str]) -> str: # cast to a filtered and unique set of Path objects file_dependencies = sorted( set( filter( lambda file_path: file_path.name != "__init__.py" and file_path.stat().st_size > 0, (Path(f).absolute().resolve() for f in files), ) ) ) # create a hashable key of all dependencies to store the combined files hash cache_key = tuple( [str(file.relative_to(PROJ_DIR)) for file in file_dependencies] ) # check if we have done this before if cache_key not in in_memory_cache: # since we haven't done this before, generate the hash for each file file_hashes = [get_md5_for_file(file) for file in file_dependencies] # combine the hashes into a string and then hash the result and store it in_memory_cache[cache_key] = md5( # noqa: S324, not 
using hash for security str.join("\n", file_hashes).encode() ).hexdigest() # return the stored calculated hash for the set return in_memory_cache[cache_key] return _get_md5_for_set_of_files @pytest.fixture(scope="session") def clean_os_environment() -> dict[str, str]: return dict( filter( lambda k_v: k_v[1] is not None, # type: ignore[arg-type] { "PATH": os.getenv("PATH"), "HOME": os.getenv("HOME"), **( {} if sys.platform != "win32" else { # Windows Required variables "ALLUSERSAPPDATA": os.getenv("ALLUSERSAPPDATA"), "ALLUSERSPROFILE": os.getenv("ALLUSERSPROFILE"), "APPDATA": os.getenv("APPDATA"), "COMMONPROGRAMFILES": os.getenv("COMMONPROGRAMFILES"), "COMMONPROGRAMFILES(X86)": os.getenv("COMMONPROGRAMFILES(X86)"), "DEFAULTUSERPROFILE": os.getenv("DEFAULTUSERPROFILE"), "HOMEPATH": os.getenv("HOMEPATH"), "PATHEXT": os.getenv("PATHEXT"), "PROFILESFOLDER": os.getenv("PROFILESFOLDER"), "PROGRAMFILES": os.getenv("PROGRAMFILES"), "PROGRAMFILES(X86)": os.getenv("PROGRAMFILES(X86)"), "SYSTEM": os.getenv("SYSTEM"), "SYSTEM16": os.getenv("SYSTEM16"), "SYSTEM32": os.getenv("SYSTEM32"), "SYSTEMDRIVE": os.getenv("SYSTEMDRIVE"), "SYSTEMROOT": os.getenv("SYSTEMROOT"), "TEMP": os.getenv("TEMP"), "TMP": os.getenv("TMP"), "USERPROFILE": os.getenv("USERPROFILE"), "USERSID": os.getenv("USERSID"), "USERNAME": os.getenv("USERNAME"), "WINDIR": os.getenv("WINDIR"), } ), }.items(), ) ) python-semantic-release-9.21.0/tests/const.py000066400000000000000000000202021475670435200212160ustar00rootroot00000000000000from enum import Enum from pathlib import Path import git import semantic_release from semantic_release.cli.commands.main import Cli PROJ_DIR = Path(__file__).parent.parent.absolute().resolve() class RepoActionStep(str, Enum): CONFIGURE = "CONFIGURE" WRITE_CHANGELOGS = "WRITE_CHANGELOGS" GIT_CHECKOUT = "GIT_CHECKOUT" GIT_COMMIT = "GIT_COMMIT" GIT_MERGE = "GIT_MERGE" GIT_SQUASH = "GIT_SQUASH" GIT_TAG = "GIT_TAG" RELEASE = "RELEASE" MAKE_COMMITS = "MAKE_COMMITS" A_FULL_VERSION_STRING = 
"1.11.567" A_PRERELEASE_VERSION_STRING = "2.3.4-dev.23" A_FULL_VERSION_STRING_WITH_BUILD_METADATA = "4.2.3+build.12345" EXAMPLE_REPO_OWNER = "example_owner" EXAMPLE_REPO_NAME = "example_repo" EXAMPLE_HVCS_DOMAIN = "example.com" DEFAULT_BRANCH_NAME = "main" INITIAL_COMMIT_MESSAGE = "Initial commit" MAIN_PROG_NAME = str(semantic_release.__name__).replace("_", "-") SUCCESS_EXIT_CODE = 0 CHANGELOG_SUBCMD = Cli.SubCmds.CHANGELOG.name.lower() GENERATE_CONFIG_SUBCMD = Cli.SubCmds.GENERATE_CONFIG.name.lower() PUBLISH_SUBCMD = Cli.SubCmds.PUBLISH.name.lower() VERSION_SUBCMD = Cli.SubCmds.VERSION.name.lower() NULL_HEX_SHA = git.Object.NULL_HEX_SHA COMMIT_MESSAGE = "{version}\n\nAutomatically generated by python-semantic-release\n" SUPPORTED_ISSUE_CLOSURE_PREFIXES = [ "Close", "Closes", "Closed", "Closing", "Fix", "Fixes", "Fixed", "Fixing", "Resolve", "Resolves", "Resolved", "Resolving", "Implement", "Implements", "Implemented", "Implementing", ] CONVENTIONAL_COMMITS_CHORE = ("ci: added a commit lint job\n",) # Different in-scope commits that produce a certain release type CONVENTIONAL_COMMITS_PATCH = ( *CONVENTIONAL_COMMITS_CHORE, "fix: fixed voltage in the flux capacitor\n", ) CONVENTIONAL_COMMITS_MINOR = ( *CONVENTIONAL_COMMITS_PATCH, "feat: last minute rush order\n", ) # Take previous commits and insert a breaking change CONVENTIONAL_COMMITS_MAJOR = ( *CONVENTIONAL_COMMITS_MINOR, "fix!: big change\n\nBREAKING CHANGE: reworked something for previous feature\n", ) EMOJI_COMMITS_CHORE = ( ":broom: updated lint & code style\n", ":none: updated ci pipeline\n", ) EMOJI_COMMITS_PATCH = ( *EMOJI_COMMITS_CHORE, ":bug: fixed voltage in the flux capacitor\n", ) EMOJI_COMMITS_MINOR = ( *EMOJI_COMMITS_PATCH, ":sparkles::pencil: docs for something special\n", # Emoji in description should not be used to evaluate change type ":sparkles: last minute rush order\n\n:boom: Good thing we're 10x developers\n", ) EMOJI_COMMITS_MAJOR = ( *EMOJI_COMMITS_MINOR, ":boom: Move to the blockchain\n", 
) # Note - the scipy commit fixtures for commits that should evaluate to the various scopes # are in tests/fixtures/scipy EXAMPLE_PROJECT_NAME = "example" EXAMPLE_PROJECT_VERSION = "0.0.0" EXAMPLE_PROJECT_LICENSE = "MIT" # Uses the internal defaults of semantic-release unless otherwise needed for testing # modify the pyproject toml as necessary for the test using update_pyproject_toml() # and derivative fixtures EXAMPLE_PYPROJECT_TOML_CONTENT = rf""" [project] license-expression = "{EXAMPLE_PROJECT_LICENSE}" [tool.poetry] name = "{EXAMPLE_PROJECT_NAME}" version = "{EXAMPLE_PROJECT_VERSION}" description = "Just an example" license = "{EXAMPLE_PROJECT_LICENSE}" authors = ["semantic-release "] readme = "README.md" classifiers = [ "Natural Language :: English", "Operating System :: OS Independent", "Programming Language :: Python :: 3 :: Only" ] [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" [tool.semantic_release] version_variables = [ "src/{EXAMPLE_PROJECT_NAME}/_version.py:__version__", ] version_toml = ["pyproject.toml:tool.poetry.version"] """.lstrip() EXAMPLE_SETUP_CFG_CONTENT = rf""" [metadata] name = example version = {EXAMPLE_PROJECT_VERSION} description = Just an example really long_description = file: README.md long_description_content_type = text/markdown author = semantic-release author_email = not-a.real@email.com url = https://github.com/python-semantic-release/python-semantic-release python_requires = >=3.7 [options] zip_safe = True include_package_data = True packages = find: install_requires = PyYAML==6.0 pydantic==1.9.0 [options.extras_require] dev = tox twine==3.1.1 test = pytest pytest-cov pytest-mock pytest-aiohttp lint = flake8 black>=22.6.0 isort>=5.10.1 [options.packages.find] exclude = test* [bdist_wheel] universal = 1 [coverage:run] omit = */tests/* [tools:pytest] python_files = tests/test_*.py tests/**/test_*.py [isort] skip = .tox,venv default_section = THIRDPARTY known_first_party = 
{EXAMPLE_PROJECT_NAME},tests multi_line_output=3 include_trailing_comma=True force_grid_wrap=0 use_parentheses=True line_length=88 [flake8] max-line-length = 88 """.lstrip() EXAMPLE_SETUP_PY_CONTENT = rf""" import re import sys from setuptools import find_packages, setup def _read_long_description(): try: with open("readme.rst") as fd: return fd.read() except Exception: return None with open("{EXAMPLE_PROJECT_NAME}/_version.py", "r") as fd: version = re.search( r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE ).group(1) try: from semantic_release import setup_hook setup_hook(sys.argv) except ImportError: pass setup( name="{EXAMPLE_PROJECT_NAME}", version="{EXAMPLE_PROJECT_VERSION}", url="http://github.com/python-semantic-release/python-semantic-release", author="semantic-release", author_email="not-a.real@email.com", description="Just an example", long_description=_read_long_description(), packages=find_packages(exclude=("tests",)), license="MIT", install_requires=[ "click>=7,<9", "click_log>=0.3,<1", "gitpython>=3.0.8,<4", "invoke>=1.4.1,<2", "semver>=2.10,<3", "twine>=3,<4", "requests>=2.25,<3", "wheel", "python-gitlab>=2,<4", # tomlkit used to be pinned to 0.7.0 # See https://github.com/python-semantic-release/python-semantic-release/issues/336 # and https://github.com/python-semantic-release/python-semantic-release/pull/337 # and https://github.com/python-semantic-release/python-semantic-release/issues/491 "tomlkit~=0.10", "dotty-dict>=1.3.0,<2", "dataclasses==0.8; python_version < '3.7.0'", "packaging", ], extras_require={{ "test": [ "coverage>=5,<6", "pytest>=5,<6", "pytest-xdist>=1,<2", "pytest-mock>=2,<3", "pytest-lazy-fixture~=0.6.3", "responses==0.13.3", "mock==1.3.0", ], "docs": ["Sphinx==1.3.6", "Jinja2==3.0.3"], "dev": ["tox", "isort", "black"], "mypy": ["mypy", "types-requests"], }}, include_package_data=True, classifiers=[ "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 
3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", ], ) """.lstrip() EXAMPLE_CHANGELOG_MD_CONTENT = r""" # CHANGELOG ## v1.0.0 * Various bugfixes, security enhancements * Extra cookies to enhance your experience * ~Removed~ simplified cookie opt-out handling logic """.lstrip() EXAMPLE_CHANGELOG_RST_CONTENT = r""" .. _changelog: ========= CHANGELOG ========= .. example project base changelog .. _changelog-v1.0.0: v1.0.0 (1970-01-01) =================== * Various bugfixes, security enhancements * Extra cookies to enhance your experience * ~Removed~ simplified cookie opt-out handling logic """.lstrip() EXAMPLE_RELEASE_NOTES_TEMPLATE = r""" ## What's Changed {% for type_, commits in release["elements"] | dictsort %}{{ "### %s" | format(type_ | title) }}{% if type_ != "unknown" %}{% for commit in commits %}{{ "* %s" | format(commit.descriptions[0] | trim) }}{% endfor %}{% endif %}{% endfor %} """.lstrip() # noqa: E501 RELEASE_NOTES = "# Release Notes" python-semantic-release-9.21.0/tests/e2e/000077500000000000000000000000001475670435200201755ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/__init__.py000066400000000000000000000000001475670435200222740ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_changelog/000077500000000000000000000000001475670435200227475ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_changelog/__init__.py000066400000000000000000000000001475670435200250460ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_changelog/test_changelog.py000066400000000000000000001272241475670435200263170ustar00rootroot00000000000000from __future__ import annotations import os import sys from textwrap import dedent from typing import TYPE_CHECKING from unittest import mock import pytest import requests_mock from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from requests import Session 
import semantic_release.hvcs.github from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.commands.main import main from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.hvcs.github import Github from semantic_release.version.version import Version from tests.const import ( CHANGELOG_SUBCMD, EXAMPLE_HVCS_DOMAIN, EXAMPLE_RELEASE_NOTES_TEMPLATE, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, MAIN_PROG_NAME, ) from tests.fixtures.example_project import ( change_to_ex_proj_dir, changelog_md_file, changelog_rst_file, default_md_changelog_insertion_flag, default_rst_changelog_insertion_flag, example_changelog_md, example_changelog_rst, ) from tests.fixtures.repos import ( repo_w_git_flow_conventional_commits, repo_w_git_flow_emoji_commits, repo_w_git_flow_scipy_commits, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits, repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_default_release_channel_emoji_commits, repo_w_github_flow_w_default_release_channel_scipy_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_emoji_commits, repo_w_github_flow_w_feature_release_channel_scipy_commits, repo_w_no_tags_conventional_commits, repo_w_no_tags_emoji_commits, repo_w_no_tags_scipy_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, 
repo_w_trunk_only_n_prereleases_conventional_commits, repo_w_trunk_only_n_prereleases_emoji_commits, repo_w_trunk_only_n_prereleases_scipy_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import ( add_text_to_file, assert_exit_code, assert_successful_exit_code, get_func_qual_name, get_release_history_from_context, ) if TYPE_CHECKING: from pathlib import Path from typing import TypedDict from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.conftest import RetrieveRuntimeContextFn from tests.fixtures.example_project import ( ExProjectDir, UpdatePyprojectTomlFn, UseReleaseNotesTemplateFn, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuiltRepoResult, CommitConvention, CommitDef, CommitNReturnChangelogEntryFn, GetCommitDefFn, GetRepoDefinitionFn, GetVersionsFromRepoBuildDefFn, ) class Commit2Section(TypedDict): conventional: Commit2SectionCommit emoji: Commit2SectionCommit scipy: Commit2SectionCommit class Commit2SectionCommit(TypedDict): commit: CommitDef section: str @pytest.mark.parametrize("arg0", [None, "--post-to-release-tag"]) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in ( # Only need to test when it has tags or no tags # DO NOT need to consider all repo types as it doesn't change no-op behavior repo_w_no_tags_conventional_commits.__name__, repo_w_trunk_only_conventional_commits.__name__, ) ], ) def test_changelog_noop_is_noop( repo_result: BuiltRepoResult, arg0: str | None, cli_runner: CliRunner, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): repo = repo_result["repo"] repo_def = repo_result["definition"] released_versions = get_versions_from_repo_build_def(repo_def) version_str = released_versions[-1] if len(released_versions) > 0 else None repo.git.reset("--hard") # Set up a requests HTTP session so we can catch the HTTP calls and ensure # they're made session = Session() session.hooks = {"response": [lambda r, *_, **__: 
r.raise_for_status()]} mock_adapter = requests_mock.Adapter() mock_adapter.register_uri( method=requests_mock.ANY, url=requests_mock.ANY, json={"id": 10001} ) session.mount("http://", mock_adapter) session.mount("https://", mock_adapter) with mock.patch( get_func_qual_name(semantic_release.hvcs.github.build_requests_session), return_value=session, ), requests_mock.Mocker(session=session) as mocker: args = [arg0, f"v{version_str}"] if version_str and arg0 else [] cli_cmd = [MAIN_PROG_NAME, "--noop", CHANGELOG_SUBCMD, *args] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not repo.git.status(short=True) if args: assert not mocker.called assert not mock_adapter.called @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result", [ *[ lazy_fixture(repo_fixture) for repo_fixture in [ # All commit types and one without a release repo_w_no_tags_conventional_commits.__name__, repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], *[ pytest.param(lazy_fixture(repo_fixture), marks=pytest.mark.comprehensive) for repo_fixture in [ # repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, # repo_w_trunk_only_conventional_commits.__name__, # repo_w_trunk_only_emoji_commits.__name__, # repo_w_trunk_only_scipy_commits.__name__, repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_emoji_commits.__name__, repo_w_trunk_only_n_prereleases_scipy_commits.__name__, 
repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_default_release_channel_emoji_commits.__name__, repo_w_github_flow_w_default_release_channel_scipy_commits.__name__, repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__, repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__, repo_w_git_flow_conventional_commits.__name__, repo_w_git_flow_emoji_commits.__name__, repo_w_git_flow_scipy_commits.__name__, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__, ] ], ], ) def test_changelog_content_regenerated( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, insertion_flag: str, ): # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.INIT.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Because we are in init mode, the insertion flag is not present in the changelog # we must take it out manually because our repo generation fixture includes it automatically with changelog_file.open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # 
but convert the content to universal newlines for comparison expected_changelog_content = ( rfd.read().replace(f"{insertion_flag}{os.linesep}", "").replace("\r", "") ) # Remove the changelog and then check that we can regenerate it os.remove(str(changelog_file.resolve())) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that the changelog file was re-created assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.usefixtures(change_to_ex_proj_dir.__name__) def test_changelog_content_regenerated_masked_initial_release( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_tags: GetRepoDefinitionFn, example_project_dir: ExProjectDir, cli_runner: CliRunner, changelog_file: Path, insertion_flag: str, ): build_definition = get_repo_definition_4_trunk_only_repo_w_tags( commit_type="conventional", mask_initial_release=True, extra_configs={ "tool.semantic_release.changelog.default_templates.changelog_file": str( changelog_file.name ), "tool.semantic_release.changelog.mode": ChangelogMode.INIT.value, }, ) build_repo_from_definition(example_project_dir, build_definition) # Because we are in init mode, the insertion flag is not present in the changelog # we must take it out manually because our repo generation fixture includes it automatically with changelog_file.open(newline=os.linesep) as rfd: # use os.linesep here because the 
insertion flag is os-specific # but convert the content to universal newlines for comparison expected_changelog_content = ( rfd.read().replace(f"{insertion_flag}{os.linesep}", "").replace("\r", "") ) # Remove the changelog and then check that we can regenerate it os.remove(str(changelog_file.resolve())) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that the changelog file was re-created assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file", [ lazy_fixture(example_changelog_md.__name__), lazy_fixture(example_changelog_rst.__name__), ], ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_unchanged( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, ): """ Given that the changelog file already exists for the current release, When the changelog command is run in "update" mode, Then the changelog file is not modified. 
""" # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that the changelog file was re-created assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file", [ lazy_fixture(example_changelog_md.__name__), lazy_fixture(example_changelog_rst.__name__), ], ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_prev_changelog( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, ): """ Given that the changelog file does not exist, When the changelog command is run in "update" mode, Then the changelog file is initialized with the default content. 
""" # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Remove any previous changelog to update os.remove(str(changelog_file.resolve())) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that the changelog file was re-created assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_flag( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, insertion_flag: str, ): """ Given a changelog template without the insertion flag, When the changelog command is run in "update" mode, Then the changelog is not modified. 
""" # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Remove the insertion flag from the changelog changelog_file.write_text( changelog_file.read_text().replace( f"{insertion_flag}\n", "", 1, ) ) # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(changelog_md_file.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(changelog_rst_file.__name__), ), ], ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ # MUST HAVE at least 2 tags! repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_header( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_format: ChangelogOutputFormat, changelog_file: Path, default_md_changelog_insertion_flag: str, default_rst_changelog_insertion_flag: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): """ Given a changelog template with the insertion flag at the beginning of the file, When the changelog command is run in "update" mode, Then the changelog is rebuilt with the latest release prepended to the existing content. 
""" repo = repo_result["repo"] # Mappings of correct fixtures to use based on the changelog format insertion_flags = { ChangelogOutputFormat.MARKDOWN: ( "# CHANGELOG{ls}{ls}{flag}".format( ls=os.linesep, flag=default_md_changelog_insertion_flag, ) ), ChangelogOutputFormat.RESTRUCTURED_TEXT: ( ".. _changelog:{ls}{ls}{h1_border}{ls}CHANGELOG{ls}{h1_border}{ls}{ls}{flag}".format( ls=os.linesep, h1_border="=" * 9, flag=default_rst_changelog_insertion_flag, ) ), } # Select the correct insertion flag based on the format insertion_flag = insertion_flags[changelog_format] # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) update_pyproject_toml( "tool.semantic_release.changelog.insertion_flag", insertion_flag, ) # Capture the expected changelog content of current release with changelog_file.open(newline=os.linesep) as rfd: expected_changelog_content = rfd.read() # Reset changelog file to last release previous_tag = f'v{get_versions_from_repo_build_def(repo_result["definition"])[-2]}' repo.git.checkout(previous_tag, "--", str(changelog_file.name)) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(example_changelog_rst.__name__), 
lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ # MUST HAVE at least 2 tags! repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_footer( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_format: ChangelogOutputFormat, changelog_file: Path, insertion_flag: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): """ Given a changelog template with the insertion flag at the end of the file, When the changelog command is run in "update" mode, Then the changelog is rebuilt with only the latest release. """ repo_result["repo"] # Mappings of correct fixtures to use based on the changelog format prev_version_tag = ( f"v{get_versions_from_repo_build_def(repo_result['definition'])[-2]}" ) split_flags = { ChangelogOutputFormat.MARKDOWN: f"\n\n## {prev_version_tag}", ChangelogOutputFormat.RESTRUCTURED_TEXT: f"\n\n.. 
_changelog-{prev_version_tag}:", } # Select the correct variable based on the format split_flag = split_flags[changelog_format] # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Capture the expected changelog content of current release (w/ universal newlines) # NOTE: universal newlines is fine because we use our split flag above is also universal expected_changelog_content = changelog_file.read_text().split(split_flag)[0] # Determine the contents to save while truncating the rest with changelog_file.open(newline=os.linesep) as rfd: # read file contents grabbing only the text before the insertion flag truncated_contents = str.join( "", [ rfd.read().split(insertion_flag)[0], insertion_flag, os.linesep, ], ) # Remove any text after the insertion flag # force output to not perform any newline translations with changelog_file.open(mode="w", newline="") as wfd: # overwrite the file with truncated contents wfd.write(truncated_contents) wfd.flush() # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content only includes the latest release as there # is no previous release information as the insertion flag is at the end of the file assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) 
@pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture) for repo_fixture in [ # Must not have a single release/tag repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_no_releases( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, insertion_flag: str, ): """ Given the repository has no releases and the user has provided a initialized changelog, When the changelog command is run in "update" mode, Then the changelog is populated with unreleased changes. """ # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Custom text to maintain (must be different from the default) custom_text = "---{ls}{ls}Custom footer text{ls}".format(ls=os.linesep) # Capture and modify the current changelog content to become the expected output # We much use os.linesep here since the insertion flag is os-specific with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) # content is os-specific because of the insertion flag & how we read the original file expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( os.linesep, [ initial_changelog_parts[1], "", custom_text, ], ), ], ) # Grab the Unreleased changelog & create the initalized user changelog # force output to not perform any newline translations with changelog_file.open(mode="w", newline="") as wfd: wfd.write( str.join( insertion_flag, [initial_changelog_parts[0], f"{os.linesep * 2}{custom_text}"], ) ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure 
changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog footer is maintained and updated with Unreleased info assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result, commit_type", [ (lazy_fixture(repo_fixture), repo_fixture.split("_")[-2]) for repo_fixture in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ) def test_changelog_update_mode_unreleased_n_released( repo_result: BuiltRepoResult, commit_type: CommitConvention, changelog_format: ChangelogOutputFormat, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, example_git_ssh_url: str, file_in_repo: str, commit_n_rtn_changelog_entry: CommitNReturnChangelogEntryFn, changelog_file: Path, insertion_flag: str, get_commit_def_of_conventional_commit: GetCommitDefFn, get_commit_def_of_emoji_commit: GetCommitDefFn, get_commit_def_of_scipy_commit: GetCommitDefFn, ): """ Given there are unreleased changes and a previous release in the changelog, When the changelog command is run in "update" mode, Then the changelog is only updated with the unreleased changes. 
""" repo = repo_result["repo"] # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) commit_n_section: Commit2Section = { "conventional": { "commit": get_commit_def_of_conventional_commit( "perf: improve the performance of the application" ), "section": "Performance Improvements", }, "emoji": { "commit": get_commit_def_of_emoji_commit( ":zap: improve the performance of the application" ), "section": ":zap:", }, "scipy": { "commit": get_commit_def_of_scipy_commit("MAINT: fix an issue"), "section": "Fix", }, } # Custom text to maintain (must be different from the default) custom_text = "---\n\nCustom footer text\n" # Update the changelog with the custom footer text changelog_file.write_text( str.join( "\n\n", [ changelog_file.read_text(), custom_text, ], ) ) # Capture the current changelog content so we can estimate the expected output # We much use os.linesep here since the insertion flag is os-specific with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) # Make a change to the repo to create unreleased changes add_text_to_file(repo, file_in_repo) unreleased_commit_entry = commit_n_rtn_changelog_entry( repo, commit_n_section[commit_type]["commit"], ) hvcs = Github(example_git_ssh_url, hvcs_domain=EXAMPLE_HVCS_DOMAIN) unreleased_change_variants = { ChangelogOutputFormat.MARKDOWN: dedent( f""" ## Unreleased ### {commit_n_section[commit_type]["section"]} - {unreleased_commit_entry['desc'].capitalize()} ([`{unreleased_commit_entry['sha'][:7]}`]({hvcs.commit_hash_url(unreleased_commit_entry['sha'])})) """ ), ChangelogOutputFormat.RESTRUCTURED_TEXT: dedent( f""" .. 
_changelog-unreleased: Unreleased ========== {commit_n_section[commit_type]["section"]} {"-" * len(commit_n_section[commit_type]["section"])} * {unreleased_commit_entry['desc'].capitalize()} (`{unreleased_commit_entry['sha'][:7]}`_) .. _{unreleased_commit_entry['sha'][:7]}: {hvcs.commit_hash_url(unreleased_commit_entry['sha'])} """ ), } # Normalize line endings to the OS-specific line ending unreleased_changes = str.join( os.linesep, [ line.replace("\r", "") for line in unreleased_change_variants[changelog_format].split("\n") ], ) # Generate the expected changelog content (os aware because of insertion flag & initial parts) expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( "", [ os.linesep, # Unreleased changes unreleased_changes, # Previous release notes initial_changelog_parts[1], ], ), ], ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content # Just need to test that it works for "a" project, not all @pytest.mark.usefixtures(repo_w_trunk_only_n_prereleases_conventional_commits.__name__) @pytest.mark.parametrize( "args", [("--post-to-release-tag", "v1.99.91910000000000000000000000000")] ) def test_changelog_release_tag_not_in_history( args: list[str], cli_runner: CliRunner, ): # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, *args] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) assert "not in release history" in result.stderr.lower() @pytest.mark.usefixtures(repo_w_trunk_only_n_prereleases_conventional_commits.__name__) @pytest.mark.parametrize( "args", [ 
("--post-to-release-tag", "v0.1.0"), # first release ("--post-to-release-tag", "v0.1.1-rc.1"), # second release ("--post-to-release-tag", "v0.2.0"), # latest release ], ) def test_changelog_post_to_release(args: list[str], cli_runner: CliRunner): # Set up a requests HTTP session so we can catch the HTTP calls and ensure they're # made session = Session() session.hooks = {"response": [lambda r, *_, **__: r.raise_for_status()]} mock_adapter = requests_mock.Adapter() mock_adapter.register_uri( method=requests_mock.ANY, url=requests_mock.ANY, json={"id": 10001} ) session.mount("http://", mock_adapter) session.mount("https://", mock_adapter) expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", # GitHub API URL owner=EXAMPLE_REPO_OWNER, repo_name=EXAMPLE_REPO_NAME, ) clean_os_environment = dict( filter( lambda k_v: k_v[1] is not None, { "CI": "true", "PATH": os.getenv("PATH"), "HOME": os.getenv("HOME"), "VIRTUAL_ENV": os.getenv("VIRTUAL_ENV", "./.venv"), **( {} if sys.platform != "win32" else { # Windows Required variables "ALLUSERSAPPDATA": os.getenv("ALLUSERSAPPDATA"), "ALLUSERSPROFILE": os.getenv("ALLUSERSPROFILE"), "APPDATA": os.getenv("APPDATA"), "COMMONPROGRAMFILES": os.getenv("COMMONPROGRAMFILES"), "COMMONPROGRAMFILES(X86)": os.getenv("COMMONPROGRAMFILES(X86)"), "DEFAULTUSERPROFILE": os.getenv("DEFAULTUSERPROFILE"), "HOMEPATH": os.getenv("HOMEPATH"), "PATHEXT": os.getenv("PATHEXT"), "PROFILESFOLDER": os.getenv("PROFILESFOLDER"), "PROGRAMFILES": os.getenv("PROGRAMFILES"), "PROGRAMFILES(X86)": os.getenv("PROGRAMFILES(X86)"), "SYSTEM": os.getenv("SYSTEM"), "SYSTEM16": os.getenv("SYSTEM16"), "SYSTEM32": os.getenv("SYSTEM32"), "SYSTEMDRIVE": os.getenv("SYSTEMDRIVE"), "SYSTEMROOT": os.getenv("SYSTEMROOT"), "TEMP": os.getenv("TEMP"), "TMP": os.getenv("TMP"), "USERPROFILE": os.getenv("USERPROFILE"), "USERSID": os.getenv("USERSID"), "USERNAME": os.getenv("USERNAME"), "WINDIR": os.getenv("WINDIR"), } 
), }.items(), ) ) # Patch out env vars that affect changelog URLs but only get set in e.g. # Github actions with mock.patch( # Patching the specific module's reference to the build_requests_session function f"{semantic_release.hvcs.github.__name__}.{semantic_release.hvcs.github.build_requests_session.__name__}", return_value=session, ) as build_requests_session_mock, mock.patch.dict( os.environ, clean_os_environment, clear=True ): # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, *args] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert build_requests_session_mock.called assert mock_adapter.called assert mock_adapter.last_request is not None assert expected_request_url == mock_adapter.last_request.url @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_n_prereleases_conventional_commits.__name__)], ) def test_custom_release_notes_template( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, use_release_notes_template: UseReleaseNotesTemplateFn, retrieve_runtime_context: RetrieveRuntimeContextFn, post_mocker: Mocker, cli_runner: CliRunner, ) -> None: """Verify the template `.release_notes.md.j2` from `template_dir` is used.""" expected_call_count = 1 version = Version.parse( get_versions_from_repo_build_def(repo_result["definition"])[-1] ) # Setup use_release_notes_template() runtime_context = retrieve_runtime_context(repo_result["repo"]) release_history = get_release_history_from_context(runtime_context) release = release_history.released[version] tag = runtime_context.version_translator.str_to_tag(str(version)) expected_release_notes = ( runtime_context.template_environment.from_string(EXAMPLE_RELEASE_NOTES_TEMPLATE) .render(release=release) .rstrip() + os.linesep ) # ensure normalized line endings after render expected_release_notes = str.join( os.linesep, str.split(expected_release_notes.replace("\r", ""), "\n"), ) # Act cli_cmd = 
[MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", tag] result = cli_runner.invoke(main, cli_cmd[1:]) # Assert assert_successful_exit_code(result, cli_cmd) assert expected_call_count == post_mocker.call_count assert post_mocker.last_request is not None actual_notes = post_mocker.last_request.json()["body"] assert expected_release_notes == actual_notes @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_default_on_empty_template_dir( example_changelog_md: Path, changelog_template_dir: Path, example_project_template_dir: Path, update_pyproject_toml: UpdatePyprojectTomlFn, cli_runner: CliRunner, ): # Setup: Make sure default changelog doesn't already exist example_changelog_md.unlink(missing_ok=True) # Setup: Create an empty template directory example_project_template_dir.mkdir(parents=True, exist_ok=True) # Setup: Set the templates directory in the configuration update_pyproject_toml( "tool.semantic_release.changelog.template_dir", str(changelog_template_dir), ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that our default changelog was created because the user's template dir was empty assert example_changelog_md.exists() @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_default_on_incorrect_config_template_file( example_changelog_md: Path, changelog_template_dir: Path, example_project_template_dir: Path, update_pyproject_toml: UpdatePyprojectTomlFn, cli_runner: CliRunner, ): # Setup: Make sure default changelog doesn't already exist example_changelog_md.unlink(missing_ok=True) # Setup: Create a file of the same name as the template directory example_project_template_dir.parent.mkdir(parents=True, exist_ok=True) example_project_template_dir.touch() # Setup: Set the templates directory as the file in the configuration update_pyproject_toml( 
"tool.semantic_release.changelog.template_dir", str(changelog_template_dir), ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that our default changelog was created because the user's template dir was empty assert example_changelog_md.exists() @pytest.mark.parametrize("bad_changelog_file_str", ("/etc/passwd", "../../.ssh/id_rsa")) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_prevent_malicious_path_traversal_file( update_pyproject_toml: UpdatePyprojectTomlFn, bad_changelog_file_str: str, cli_runner: CliRunner, ): # Setup: A malicious path traversal filepath outside of the repository update_pyproject_toml( "tool.semantic_release.changelog.changelog_file", bad_changelog_file_str, ) # Act cli_cmd = [MAIN_PROG_NAME, "--noop", CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_exit_code(1, result, cli_cmd) assert ( "Changelog file destination must be inside of the repository directory." in result.stderr ) @pytest.mark.parametrize("template_dir_path", ("~/.ssh", "../../.ssh")) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_changelog_prevent_external_path_traversal_dir( update_pyproject_toml: UpdatePyprojectTomlFn, template_dir_path: str, cli_runner: CliRunner, ): # Setup: A malicious path traversal filepath outside of the repository update_pyproject_toml( "tool.semantic_release.changelog.template_dir", template_dir_path, ) # Act cli_cmd = [MAIN_PROG_NAME, "--noop", CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_exit_code(1, result, cli_cmd) assert ( "Template directory must be inside of the repository directory." 
in result.stderr ) python-semantic-release-9.21.0/tests/e2e/cmd_changelog/test_changelog_custom_parser.py000066400000000000000000000057351475670435200312670ustar00rootroot00000000000000from __future__ import annotations import os from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.commands.main import main from tests.const import CHANGELOG_SUBCMD, MAIN_PROG_NAME from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import ( CustomConventionalParserWithIgnorePatterns, assert_successful_exit_code, ) if TYPE_CHECKING: from pathlib import Path from click.testing import CliRunner from tests.fixtures.example_project import UpdatePyprojectTomlFn, UseCustomParserFn from tests.fixtures.git_repo import BuiltRepoResult, GetCommitDefFn @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)] ) def test_changelog_custom_parser_remove_from_changelog( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, use_custom_parser: UseCustomParserFn, get_commit_def_of_conventional_commit: GetCommitDefFn, changelog_md_file: Path, default_md_changelog_insertion_flag: str, ): """ Given when a changelog filtering custom parser is configured When provided a commit message that matches the ignore syntax Then the commit message is not included in the resulting changelog """ ignored_commit_def = get_commit_def_of_conventional_commit( "chore: do not include me in the changelog" ) # Because we are in init mode, the insertion flag is not present in the changelog # we must take it out manually because our repo generation fixture includes it automatically with changelog_md_file.open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison 
expected_changelog_content = ( rfd.read() .replace(f"{default_md_changelog_insertion_flag}{os.linesep}", "") .replace("\r", "") ) # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.INIT.value ) use_custom_parser( f"{CustomConventionalParserWithIgnorePatterns.__module__}:{CustomConventionalParserWithIgnorePatterns.__name__}" ) # Setup: add the commit to be ignored repo_result["repo"].git.commit(m=ignored_commit_def["msg"], a=True) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Take measurement after action actual_content = changelog_md_file.read_text() # Evaluate assert_successful_exit_code(result, cli_cmd) # Verify that the changelog content does not include our commit assert ignored_commit_def["desc"] not in actual_content # Verify that the changelog content has not changed assert expected_changelog_content == actual_content python-semantic-release-9.21.0/tests/e2e/cmd_changelog/test_changelog_parsing.py000066400000000000000000000107761475670435200300450ustar00rootroot00000000000000from __future__ import annotations import os import shutil from pathlib import Path from re import MULTILINE, compile as regexp from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.commands.main import main from semantic_release.cli.const import JINJA2_EXTENSION from tests.const import CHANGELOG_SUBCMD, MAIN_PROG_NAME from tests.fixtures.example_project import ( default_changelog_md_template, default_changelog_rst_template, default_md_changelog_insertion_flag, default_rst_changelog_insertion_flag, example_changelog_md, example_changelog_rst, ) from tests.fixtures.repos.git_flow import ( repo_w_git_flow_conventional_commits, repo_w_git_flow_scipy_commits, ) from tests.util import assert_successful_exit_code if TYPE_CHECKING: from 
click.testing import CliRunner from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult @pytest.mark.parametrize( "changelog_file, insertion_flag, default_changelog_template, changes_tpl_file", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), lazy_fixture(default_changelog_md_template.__name__), Path(".components", "changes.md.j2"), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), lazy_fixture(default_changelog_rst_template.__name__), Path(".components", "changes.rst.j2"), ), ], ) @pytest.mark.parametrize( "repo_result", [ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_git_flow_conventional_commits.__name__, repo_w_git_flow_scipy_commits.__name__, ] ], ) def test_changelog_parsing_ignore_merge_commits( cli_runner: CliRunner, repo_result: BuiltRepoResult, update_pyproject_toml: UpdatePyprojectTomlFn, example_project_template_dir: Path, changelog_file: Path, insertion_flag: str, default_changelog_template: Path, changes_tpl_file: Path, ): repo = repo_result["repo"] expected_changelog_content = changelog_file.read_text() update_pyproject_toml( "tool.semantic_release.commit_parser_options.ignore_merge_commits", True ) update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.insertion_flag", insertion_flag, ) update_pyproject_toml( "tool.semantic_release.changelog.template_dir", str(example_project_template_dir.relative_to(repo.working_dir)), ) update_pyproject_toml( "tool.semantic_release.changelog.exclude_commit_patterns", [ r"""Initial Commit.*""", ], ) # Force custom changelog to be a copy of the default changelog shutil.copytree( 
src=default_changelog_template.parent, dst=example_project_template_dir, dirs_exist_ok=True, ) # Remove the "unknown" filter from the changelog template to enable Merge commits patch = regexp( r'^(#}{% *for type_, commits in commit_objects) if type_ != "unknown"', MULTILINE, ) changes_file = example_project_template_dir.joinpath(changes_tpl_file) changes_file.write_text(patch.sub(r"\1", changes_file.read_text())) # Make sure the prev_changelog_file is the same as the current changelog changelog_tpl_file = example_project_template_dir.joinpath( changelog_file.name ).with_suffix(str.join("", [changelog_file.suffix, JINJA2_EXTENSION])) changelog_tpl_file.write_text( regexp(r"= ctx.prev_changelog_file").sub( rf'= "{changelog_file.name}"', changelog_tpl_file.read_text() ) ) # Remove the changelog to force re-generation with new configurations os.remove(str(changelog_file.resolve())) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == changelog_file.read_text() python-semantic-release-9.21.0/tests/e2e/cmd_changelog/test_changelog_release_notes.py000066400000000000000000000272511475670435200312260ustar00rootroot00000000000000from __future__ import annotations from datetime import datetime from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures import lf as lazy_fixture from semantic_release.cli.commands.main import main from semantic_release.version.version import Version from tests.const import CHANGELOG_SUBCMD, EXAMPLE_PROJECT_LICENSE, MAIN_PROG_NAME from tests.fixtures.repos import ( repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import assert_successful_exit_code if TYPE_CHECKING: from click.testing import 
CliRunner from requests_mock import Mocker from tests.conftest import GetStableDateNowFn from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import ( BuiltRepoResult, GenerateDefaultReleaseNotesFromDefFn, GetCfgValueFromDefFn, GetHvcsClientFromRepoDefFn, GetVersionsFromRepoBuildDefFn, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_fixture_name) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, ] ], ) def test_changelog_latest_release_notes( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, cli_runner: CliRunner, post_mocker: Mocker, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, ): # Setup repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] repo_actions_per_version = split_repo_actions_by_release_tags( repo_definition=repo_def, tag_format_str=tag_format_str, ) all_versions = get_versions_from_repo_build_def(repo_def) latest_release_version = all_versions[-1] release_tag = tag_format_str.format(version=latest_release_version) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[release_tag], hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=( Version.parse(all_versions[-2]) if len(all_versions) > 1 else None ), license_name=EXAMPLE_PROJECT_LICENSE, mask_initial_release=get_cfg_value_from_def(repo_def, "mask_initial_release"), ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", release_tag] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 1 assert 
post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_posted_notes = request_body["body"] assert expected_release_notes == actual_posted_notes @pytest.mark.parametrize( "repo_result, mask_initial_release", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), True, ), pytest.param( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), False, marks=pytest.mark.comprehensive, ), *[ pytest.param( lazy_fixture(repo_fixture_name), mask_initial_release, marks=pytest.mark.comprehensive, ) for mask_initial_release in [True, False] for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, ] ], ], ) def test_changelog_previous_release_notes( repo_result: BuiltRepoResult, mask_initial_release: bool, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, cli_runner: CliRunner, post_mocker: Mocker, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, update_pyproject_toml: UpdatePyprojectTomlFn, ): # Setup repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] repo_actions_per_version = split_repo_actions_by_release_tags( repo_definition=repo_def, tag_format_str=tag_format_str, ) # Extract all versions except for the latest one all_prev_versions = get_versions_from_repo_build_def(repo_def)[:-1] latest_release_version = all_prev_versions[-1] release_tag = tag_format_str.format(version=latest_release_version) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[release_tag], hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=( Version.parse(all_prev_versions[-2]) if len(all_prev_versions) > 1 else 
None ), license_name=EXAMPLE_PROJECT_LICENSE, mask_initial_release=mask_initial_release, ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, ) # Act cli_cmd = [MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", release_tag] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 1 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_posted_notes = request_body["body"] assert expected_release_notes == actual_posted_notes @pytest.mark.parametrize( "repo_result, cache_key, mask_initial_release, license_name", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), f"psr/repos/{repo_w_trunk_only_conventional_commits.__name__}", True, "BSD-3-Clause", ), pytest.param( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), f"psr/repos/{repo_w_trunk_only_conventional_commits.__name__}", False, "BSD-3-Clause", marks=pytest.mark.comprehensive, ), *[ pytest.param( lazy_fixture(repo_fixture_name), f"psr/repos/{repo_fixture_name}", mask_initial_release, "BSD-3-Clause", marks=pytest.mark.comprehensive, ) for mask_initial_release in [True, False] for repo_fixture_name in [ repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, # Add more repos here if needed # github_flow had issues as its hard to generate the release notes from squash commits repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, ] ], ], ) def test_changelog_release_notes_license_change( repo_result: BuiltRepoResult, license_name: str, mask_initial_release: bool, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, cli_runner: CliRunner, post_mocker: Mocker, split_repo_actions_by_release_tags: 
SplitRepoActionsByReleaseTagsFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, update_pyproject_toml: UpdatePyprojectTomlFn, cache: pytest.Cache, cache_key: str, stable_now_date: GetStableDateNowFn, ): # Setup repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] if not (repo_build_data := cache.get(cache_key, None)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) repo_actions_per_version = split_repo_actions_by_release_tags( repo_definition=repo_def, tag_format_str=tag_format_str, ) # Extract all versions all_versions = get_versions_from_repo_build_def(repo_def) assert len(all_versions) > 1 latest_release_version = all_versions[-1] previous_release_version = all_versions[-2] latest_release_tag = tag_format_str.format(version=latest_release_version) prev_release_tag = tag_format_str.format(version=previous_release_version) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[latest_release_tag], hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=( Version.parse(previous_release_version) if len(all_versions) > 1 else None ), license_name=license_name, mask_initial_release=mask_initial_release, ) expected_prev_release_notes = generate_default_release_notes_from_def( version_actions=repo_actions_per_version[prev_release_tag], hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=( Version.parse(all_versions[-3]) if len(all_versions) > 2 else None ), license_name=EXAMPLE_PROJECT_LICENSE, mask_initial_release=mask_initial_release, ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, ) 
update_pyproject_toml("project.license-expression", license_name) git_repo = repo_result["repo"] git_repo.git.commit( amend=True, a=True, no_edit=True, date=now_datetime.isoformat(timespec="seconds"), ) with git_repo.git.custom_environment( GIT_COMMITTER_DATE=now_datetime.isoformat(timespec="seconds"), ): git_repo.git.tag(latest_release_tag, d=True) git_repo.git.tag(latest_release_tag, a=True, m=latest_release_tag) # Act cli_cmd = [ MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", latest_release_tag, ] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 1 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_new_posted_notes = request_body["body"] assert expected_release_notes == actual_new_posted_notes # Generate the previous release notes cli_cmd = [ MAIN_PROG_NAME, CHANGELOG_SUBCMD, "--post-to-release-tag", prev_release_tag, ] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert post_mocker.call_count == 2 assert post_mocker.last_request is not None request_body = post_mocker.last_request.json() assert "body" in request_body actual_prev_posted_notes = request_body["body"] assert expected_prev_release_notes == actual_prev_posted_notes assert actual_prev_posted_notes != actual_new_posted_notes python-semantic-release-9.21.0/tests/e2e/cmd_config/000077500000000000000000000000001475670435200222655ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_config/__init__.py000066400000000000000000000000001475670435200243640ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_config/test_generate_config.py000066400000000000000000000127601475670435200270230ustar00rootroot00000000000000from __future__ import annotations import json from typing import TYPE_CHECKING import pytest import tomlkit from 
semantic_release.cli.commands.main import main from semantic_release.cli.config import RawConfig from tests.const import GENERATE_CONFIG_SUBCMD, MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import assert_successful_exit_code if TYPE_CHECKING: from pathlib import Path from typing import Any from click.testing import CliRunner from tests.fixtures.example_project import ExProjectDir @pytest.fixture def raw_config_dict() -> dict[str, Any]: return RawConfig().model_dump(mode="json", exclude_none=True) @pytest.mark.parametrize("args", [(), ("--format", "toml"), ("--format", "TOML")]) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_generate_config_toml( cli_runner: CliRunner, args: tuple[str], raw_config_dict: dict[str, Any], example_project_dir: ExProjectDir, ): # Setup: Generate the expected configuration as a TOML string expected_config_as_str = tomlkit.dumps( {"semantic_release": raw_config_dict} ).strip() # Act: Print the generated configuration to stdout cli_cmd = [MAIN_PROG_NAME, GENERATE_CONFIG_SUBCMD, *args] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate: Check that the command ran successfully and that the output matches the expected configuration assert_successful_exit_code(result, cli_cmd) assert expected_config_as_str == result.output.strip() # Setup: Write the generated configuration to a file config_file = "releaserc.toml" example_project_dir.joinpath(config_file).write_text(result.output) # Act: Validate that the generated config is a valid configuration for PSR cli_cmd = [ MAIN_PROG_NAME, "--noop", "--strict", "-c", config_file, VERSION_SUBCMD, "--print", ] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate: Check that the version command in noop mode ran successfully # which means PSR loaded the configuration successfully assert_successful_exit_code(result, cli_cmd) @pytest.mark.parametrize("args", [("--format", "json"), ("--format", 
"JSON")]) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_generate_config_json( cli_runner: CliRunner, args: tuple[str], raw_config_dict: dict[str, Any], example_project_dir: ExProjectDir, ): # Setup: Generate the expected configuration as a JSON string expected_config_as_str = json.dumps( {"semantic_release": raw_config_dict}, indent=4 ).strip() # Act: Print the generated configuration to stdout cli_cmd = [MAIN_PROG_NAME, GENERATE_CONFIG_SUBCMD, *args] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate: Check that the command ran successfully and that the output matches the expected configuration assert_successful_exit_code(result, cli_cmd) assert expected_config_as_str == result.output.strip() # Setup: Write the generated configuration to a file config_file = "releaserc.json" example_project_dir.joinpath(config_file).write_text(result.output) # Act: Validate that the generated config is a valid configuration for PSR cli_cmd = [ MAIN_PROG_NAME, "--noop", "--strict", "-c", config_file, VERSION_SUBCMD, "--print", ] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate: Check that the version command in noop mode ran successfully # which means PSR loaded the configuration successfully assert_successful_exit_code(result, cli_cmd) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_generate_config_pyproject_toml( cli_runner: CliRunner, raw_config_dict: dict[str, Any], example_pyproject_toml: Path, ): # Setup: Generate the expected configuration as a TOML string according to PEP 518 expected_config_as_str = tomlkit.dumps( {"tool": {"semantic_release": raw_config_dict}} ).strip() # Setup: Remove any current configuration from pyproject.toml pyproject_config = tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")) pyproject_config.get("tool", {}).pop("semantic_release", None) example_pyproject_toml.write_text(tomlkit.dumps(pyproject_config)) # Act: Print the generated configuration to stdout 
cli_cmd = [ MAIN_PROG_NAME, GENERATE_CONFIG_SUBCMD, "--format", "toml", "--pyproject", ] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate: Check that the command ran successfully and that the output matches the expected configuration assert_successful_exit_code(result, cli_cmd) assert expected_config_as_str == result.output.strip() # Setup: Write the generated configuration to a file example_pyproject_toml.write_text( str.join( "\n\n", [ example_pyproject_toml.read_text(encoding="utf-8").strip(), result.output, ], ) ) # Act: Validate that the generated config is a valid configuration for PSR cli_cmd = [MAIN_PROG_NAME, "--noop", "--strict", VERSION_SUBCMD, "--print"] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate: Check that the version command in noop mode ran successfully # which means PSR loaded the configuration successfully assert_successful_exit_code(result, cli_cmd) python-semantic-release-9.21.0/tests/e2e/cmd_publish/000077500000000000000000000000001475670435200224665ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_publish/__init__.py000066400000000000000000000000001475670435200245650ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_publish/test_publish.py000066400000000000000000000060561475670435200255540ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING from unittest import mock import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.main import main from semantic_release.hvcs import Github from tests.const import MAIN_PROG_NAME, PUBLISH_SUBCMD from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_exit_code, assert_successful_exit_code if TYPE_CHECKING: from typing import Sequence from click.testing import CliRunner from tests.fixtures.git_repo import BuiltRepoResult, GetVersionsFromRepoBuildDefFn @pytest.mark.parametrize("cmd_args", [(), 
("--tag", "latest")]) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_publish_latest_uses_latest_tag( repo_result: BuiltRepoResult, cli_runner: CliRunner, cmd_args: Sequence[str], get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): latest_version = get_versions_from_repo_build_def(repo_result["definition"])[-1] latest_tag = f"v{latest_version}" with mock.patch.object( Github, Github.upload_dists.__name__, ) as mocked_upload_dists: cli_cmd = [MAIN_PROG_NAME, PUBLISH_SUBCMD, *cmd_args] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) mocked_upload_dists.assert_called_once_with(tag=latest_tag, dist_glob="dist/*") @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_publish_to_tag_uses_tag( repo_result: BuiltRepoResult, cli_runner: CliRunner, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ): # Testing a non-latest tag to distinguish from test_publish_latest_uses_latest_tag() previous_version = get_versions_from_repo_build_def(repo_result["definition"])[-2] previous_tag = f"v{previous_version}" with mock.patch.object(Github, Github.upload_dists.__name__) as mocked_upload_dists: cli_cmd = [MAIN_PROG_NAME, PUBLISH_SUBCMD, "--tag", previous_tag] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) mocked_upload_dists.assert_called_once_with( tag=previous_tag, dist_glob="dist/*" ) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_publish_fails_on_nonexistant_tag(cli_runner: CliRunner): non_existant_tag = "nonexistant-tag" with mock.patch.object(Github, Github.upload_dists.__name__) as mocked_upload_dists: cli_cmd = [MAIN_PROG_NAME, PUBLISH_SUBCMD, "--tag", non_existant_tag] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_exit_code(1, result, 
cli_cmd) assert ( f"Tag '{non_existant_tag}' not found in local repository!" in result.stderr ) mocked_upload_dists.assert_not_called() python-semantic-release-9.21.0/tests/e2e/cmd_version/000077500000000000000000000000001475670435200225055ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/__init__.py000066400000000000000000000000001475670435200246040ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/000077500000000000000000000000001475670435200252155ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/__init__.py000066400000000000000000000000001475670435200273140ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/conftest.py000066400000000000000000000034321475670435200274160ustar00rootroot00000000000000from __future__ import annotations import shutil from typing import TYPE_CHECKING import pytest from git import Repo if TYPE_CHECKING: from pathlib import Path from typing import Protocol from tests.fixtures.git_repo import BuildRepoFromDefinitionFn, RepoActionConfigure class InitMirrorRepo4RebuildFn(Protocol): def __call__( self, mirror_repo_dir: Path, configuration_step: RepoActionConfigure, ) -> Path: ... 
@pytest.fixture(scope="session") def init_mirror_repo_for_rebuild( default_changelog_md_template: Path, default_changelog_rst_template: Path, changelog_template_dir: Path, build_repo_from_definition: BuildRepoFromDefinitionFn, ) -> InitMirrorRepo4RebuildFn: def _init_mirror_repo_for_rebuild( mirror_repo_dir: Path, configuration_step: RepoActionConfigure, ) -> Path: # Create the mirror repo directory mirror_repo_dir.mkdir(exist_ok=True, parents=True) # Initialize mirror repository build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=[configuration_step], ) # Force custom changelog to be a copy of the default changelog (md and rst) shutil.copytree( src=default_changelog_md_template.parent, dst=mirror_repo_dir / changelog_template_dir, dirs_exist_ok=True, ) shutil.copytree( src=default_changelog_rst_template.parent, dst=mirror_repo_dir / changelog_template_dir, dirs_exist_ok=True, ) with Repo(mirror_repo_dir) as mirror_git_repo: mirror_git_repo.git.add(str(changelog_template_dir)) return mirror_repo_dir return _init_mirror_repo_for_rebuild python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/git_flow/000077500000000000000000000000001475670435200270275ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/git_flow/__init__.py000066400000000000000000000000001475670435200311260ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/git_flow/test_repo_1_channel.py000066400000000000000000000155551475670435200333300ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.git_flow import ( repo_w_git_flow_conventional_commits, repo_w_git_flow_emoji_commits, repo_w_git_flow_scipy_commits, ) from 
tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_git_flow_conventional_commits.__name__, repo_w_git_flow_emoji_commits.__name__, repo_w_git_flow_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_1_channel( repo_fixture_name: str, cli_runner: CliRunner, build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_1_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) 
target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", 
release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/git_flow/test_repo_2_channels.py000066400000000000000000000157621475670435200335140ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from 
tests.fixtures.repos.git_flow import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_2_channels( repo_fixture_name: str, cli_runner: CliRunner, build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( 
repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_2_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) 
release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured 
python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/git_flow/test_repo_3_channels.py000066400000000000000000000163151475670435200335100ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.git_flow import ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_3_channels( repo_fixture_name: str, cli_runner: CliRunner, build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, 
split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_3_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo 
target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # 
Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/git_flow/test_repo_4_channels.py000066400000000000000000000160501475670435200335050ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.git_flow import ( repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ 
repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits.__name__, ] ], ) def test_gitflow_repo_rebuild_4_channels( repo_fixture_name: str, cli_runner: CliRunner, build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_git_flow_repo_w_4_release_channels( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), 
configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = 
get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/github_flow/000077500000000000000000000000001475670435200275265ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/github_flow/__init__.py000066400000000000000000000000001475670435200316250ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/github_flow/test_repo_1_channel.py000066400000000000000000000160651475670435200340240ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.github_flow import ( repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_default_release_channel_emoji_commits, repo_w_github_flow_w_default_release_channel_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from 
unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_default_release_channel_emoji_commits.__name__, repo_w_github_flow_w_default_release_channel_scipy_commits.__name__, ] ], ) def test_githubflow_repo_rebuild_1_channel( repo_fixture_name: str, cli_runner: CliRunner, build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_repo_w_github_flow_w_default_release_channel( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) 
target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", 
release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured test_repo_2_channels.py000066400000000000000000000161221475670435200341230ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/github_flowfrom __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from 
tests.fixtures.repos.github_flow import ( repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_emoji_commits, repo_w_github_flow_w_feature_release_channel_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__, repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__, ] ], ) def test_githubflow_repo_rebuild_2_channels( repo_fixture_name: str, cli_runner: CliRunner, build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name 
commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] # type: ignore[assignment] ) target_repo_definition = build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, 
repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured 
python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/trunk_based_dev/000077500000000000000000000000001475670435200303545ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/trunk_based_dev/__init__.py000066400000000000000000000000001475670435200324530ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/trunk_based_dev/test_repo_trunk.py000066400000000000000000000156311475670435200341630ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ repo_w_trunk_only_conventional_commits.__name__, *[ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], ], ) def test_trunk_repo_rebuild_only_official_releases( repo_fixture_name: str, cli_runner: CliRunner, build_trunk_only_repo_w_tags: 
BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_tags( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo 
target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # 
Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured test_repo_trunk_dual_version_support.py000066400000000000000000000163021475670435200404460ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/trunk_based_devfrom __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( DEFAULT_BRANCH_NAME, MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_dual_version_spt_conventional_commits, repo_w_trunk_only_dual_version_spt_emoji_commits, repo_w_trunk_only_dual_version_spt_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ 
repo_w_trunk_only_dual_version_spt_conventional_commits.__name__, repo_w_trunk_only_dual_version_spt_emoji_commits.__name__, repo_w_trunk_only_dual_version_spt_scipy_commits.__name__, ] ], ) def test_trunk_repo_rebuild_dual_version_spt_official_releases_only( repo_fixture_name: str, cli_runner: CliRunner, build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_dual_version_support( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), 
configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo head_reference_name = ( curr_release_tag if curr_release_tag != "Unreleased" else DEFAULT_BRANCH_NAME ) target_git_repo.git.checkout(head_reference_name, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content 
= get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured test_repo_trunk_dual_version_support_w_prereleases.py000066400000000000000000000176331475670435200433760ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/trunk_based_devfrom __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( DEFAULT_BRANCH_NAME, MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits, repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits, repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import 
InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits.__name__, repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits.__name__, repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits.__name__, ] ], ) def test_trunk_repo_rebuild_dual_version_spt_w_official_n_prereleases( repo_fixture_name: str, cli_runner: CliRunner, build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", 
) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo head_reference_name = ( curr_release_tag if curr_release_tag != "Unreleased" else DEFAULT_BRANCH_NAME ) target_git_repo.git.checkout(head_reference_name, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", 
release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) prerelease_args = ( [ "--as-prerelease", "--prerelease-token", ( release_action_step["details"]["version"] .split("-", maxsplit=1)[-1] .split(".", maxsplit=1)[0] ), ] if len(release_action_step["details"]["version"].split("-", maxsplit=1)) > 1 else [] ) cli_cmd = [ MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args, *prerelease_args, ] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured 
test_repo_trunk_w_prereleases.py000066400000000000000000000171031475670435200370200ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/e2e/cmd_version/bump_version/trunk_based_devfrom __future__ import annotations from typing import TYPE_CHECKING import pytest import tomlkit from flatdict import FlatDict from freezegun import freeze_time from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos.trunk_based_dev import ( repo_w_trunk_only_n_prereleases_conventional_commits, repo_w_trunk_only_n_prereleases_emoji_commits, repo_w_trunk_only_n_prereleases_scipy_commits, ) from tests.util import assert_successful_exit_code, temporary_working_directory if TYPE_CHECKING: from pathlib import Path from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.e2e.cmd_version.bump_version.conftest import InitMirrorRepo4RebuildFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildSpecificRepoFn, CommitConvention, GetGitRepo4DirFn, RepoActionConfigure, RepoActionRelease, RepoActions, SplitRepoActionsByReleaseTagsFn, ) @pytest.mark.parametrize( "repo_fixture_name", [ pytest.param(repo_fixture_name, marks=pytest.mark.comprehensive) for repo_fixture_name in [ repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_emoji_commits.__name__, repo_w_trunk_only_n_prereleases_scipy_commits.__name__, ] ], ) def test_trunk_repo_rebuild_w_prereleases( repo_fixture_name: str, cli_runner: CliRunner, build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, init_mirror_repo_for_rebuild: InitMirrorRepo4RebuildFn, example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, 
build_repo_from_definition: BuildRepoFromDefinitionFn, mocked_git_push: MagicMock, post_mocker: Mocker, default_tag_format_str: str, version_py_file: Path, get_sanitized_md_changelog_content: GetSanitizedChangelogContentFn, get_sanitized_rst_changelog_content: GetSanitizedChangelogContentFn, ): # build target repo into a temporary directory target_repo_dir = example_project_dir / repo_fixture_name commit_type: CommitConvention = ( repo_fixture_name.split("commits", 1)[0].split("_")[-2] # type: ignore[assignment] ) target_repo_definition = build_trunk_only_repo_w_prerelease_tags( repo_name=repo_fixture_name, commit_type=commit_type, dest_dir=target_repo_dir, ) target_git_repo = git_repo_for_directory(target_repo_dir) target_repo_pyproject_toml = FlatDict( tomlkit.loads((target_repo_dir / "pyproject.toml").read_text(encoding="utf-8")), delimiter=".", ) tag_format_str: str = target_repo_pyproject_toml.get( # type: ignore[assignment] "tool.semantic_release.tag_format", default_tag_format_str, ) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( split_repo_actions_by_release_tags(target_repo_definition, tag_format_str) ) configuration_step: RepoActionConfigure = releasetags_2_steps.pop("")[0] # type: ignore[assignment] # Create the mirror repo directory mirror_repo_dir = init_mirror_repo_for_rebuild( mirror_repo_dir=(example_project_dir / "mirror"), configuration_step=configuration_step, ) mirror_git_repo = git_repo_for_directory(mirror_repo_dir) # rebuild repo from scratch stopping before each release tag for curr_release_tag, steps in releasetags_2_steps.items(): # make sure mocks are clear mocked_git_push.reset_mock() post_mocker.reset_mock() # Extract expected result from target repo target_git_repo.git.checkout(curr_release_tag, detach=True) expected_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=target_repo_dir ) expected_rst_changelog_content = get_sanitized_rst_changelog_content( 
repo_dir=target_repo_dir ) expected_pyproject_toml_content = ( target_repo_dir / "pyproject.toml" ).read_text() expected_version_file_content = (target_repo_dir / version_py_file).read_text() expected_release_commit_text = target_git_repo.head.commit.message # In our repo env, start building the repo from the definition build_repo_from_definition( dest_dir=mirror_repo_dir, repo_construction_steps=steps[:-1], # stop before the release step ) release_action_step: RepoActionRelease = steps[-1] # type: ignore[assignment] # Act: run PSR on the repo instead of the RELEASE step with freeze_time( release_action_step["details"]["datetime"] ), temporary_working_directory(mirror_repo_dir): build_metadata_args = ( [ "--build-metadata", release_action_step["details"]["version"].split("+", maxsplit=1)[ -1 ], ] if len(release_action_step["details"]["version"].split("+", maxsplit=1)) > 1 else [] ) prerelease_args = ( [ "--as-prerelease", "--prerelease-token", ( release_action_step["details"]["version"] .split("-", maxsplit=1)[-1] .split(".", maxsplit=1)[0] ), ] if len(release_action_step["details"]["version"].split("-", maxsplit=1)) > 1 else [] ) cli_cmd = [ MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, *build_metadata_args, *prerelease_args, ] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command actual_release_commit_text = mirror_git_repo.head.commit.message actual_pyproject_toml_content = (mirror_repo_dir / "pyproject.toml").read_text() actual_version_file_content = (mirror_repo_dir / version_py_file).read_text() actual_md_changelog_content = get_sanitized_md_changelog_content( repo_dir=mirror_repo_dir ) actual_rst_changelog_content = get_sanitized_rst_changelog_content( repo_dir=mirror_repo_dir ) # Evaluate (normal release actions should have occurred as expected) assert_successful_exit_code(result, cli_cmd) # Make sure version file is updated assert expected_pyproject_toml_content == actual_pyproject_toml_content assert 
expected_version_file_content == actual_version_file_content # Make sure changelog is updated assert expected_md_changelog_content == actual_md_changelog_content assert expected_rst_changelog_content == actual_rst_changelog_content # Make sure commit is created assert expected_release_commit_text == actual_release_commit_text # Make sure tag is created assert curr_release_tag in [tag.name for tag in mirror_git_repo.tags] assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occured python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version.py000066400000000000000000000224271475670435200256120ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.fixtures.repos import ( repo_w_no_tags_conventional_commits, repo_w_trunk_only_conventional_commits, ) from tests.util import assert_successful_exit_code if TYPE_CHECKING: from unittest.mock import MagicMock from click.testing import CliRunner from git import Repo from requests_mock import Mocker from tests.fixtures.example_project import GetWheelFileFn, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult, GetVersionsFromRepoBuildDefFn # No-op shouldn't change based on the branching/merging of the repository @pytest.mark.parametrize( "repo_result, next_release_version", # must use a repo that is ready for a release to prevent no release # logic from being triggered before the noop logic [(lazy_fixture(repo_w_no_tags_conventional_commits.__name__), "0.1.0")], ) def test_version_noop_is_noop( repo_result: BuiltRepoResult, next_release_version: str, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, get_wheel_file: 
GetWheelFileFn, ): repo: Repo = repo_result["repo"] build_result_file = get_wheel_file(next_release_version) # Setup: reset any uncommitted changes (if any) repo.git.reset("--hard") # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, "--noop", VERSION_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha, "HEAD should not have changed" assert not tags_set_difference # No tag created # no build result assert not build_result_file.exists() # no file changes (since no commit was made then just check for non-committed changes) assert not repo.git.status(short=True) assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_no_git_verify( repo_result: BuiltRepoResult, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] # setup: set configuration setting update_pyproject_toml("tool.semantic_release.no_git_verify", True) repo.git.commit( m="chore: adjust project configuration for --no-verify release commits", a=True ) # setup: create executable pre-commit script precommit_hook = Path(repo.git_dir, "hooks", "pre-commit") precommit_hook.parent.mkdir(parents=True, exist_ok=True) precommit_hook.write_text( dedent( """\ #!/bin/sh echo >&2 "Always fail 
pre-commit" && exit 1; """ ) ) precommit_hook.chmod(0o754) # setup: set git configuration to have the pre-commit hook repo.git.config( "core.hookspath", str(precommit_hook.parent.relative_to(repo.working_dir)), local=True, ) # Take measurement beforehand head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Execute cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--patch"] result = cli_runner.invoke(main, cli_cmd[1:]) # Take measurement after the command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_version_on_nonrelease_branch( repo_result: BuiltRepoResult, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given repo is on a non-release branch, When running the version command, Then no version release should happen which means no code changes, no build, no commit, no tag, no push, and no vcs release creation while returning a successful exit code """ repo = repo_result["repo"] branch = repo.create_head("next").checkout() expected_error_msg = ( f"branch '{branch.name}' isn't in any release groups; no release will be made\n" ) repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate 
(expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not result.stdout assert expected_error_msg == result.stderr # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) tags_after = sorted([tag.name for tag in repo.tags]) assert repo_status_before == repo.git.status(short=True) assert head_before == repo.head.commit.hexsha assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_on_last_release( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given repo is on the last release version, When running the version command, Then no version release should happen which means no code changes, no build, no commit, no tag, no push, and no vcs release creation while returning a successful exit code and printing the last release version """ repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] expected_error_msg = ( f"No release will be made, {latest_release_version} has already been released!" 
) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = sorted([tag.name for tag in repo.tags]) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert f"{latest_release_version}\n" == result.stdout assert f"{expected_error_msg}\n" == result.stderr # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)] ) def test_version_only_tag_push( repo_result: BuiltRepoResult, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ) -> None: """ Given a repo with no tags, When running the version command with the `--no-commit` and `--tag` flags, Then a tag should be created on the current commit, pushed, and a release created. 
""" repo = repo_result["repo"] # Setup head_before = repo.head.commit # Act cli_cmd = [ MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit", "--tag", ] result = cli_runner.invoke(main, cli_cmd[1:]) # capture values after the command tag_after = repo.tags[-1].name head_after = repo.head.commit # Assert only tag was created, it was pushed and then release was created assert_successful_exit_code(result, cli_cmd) assert tag_after == "v0.1.0" assert head_before == head_after assert mocked_git_push.call_count == 1 # 0 for commit, 1 for tag assert post_mocker.call_count == 1 python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_build.py000066400000000000000000000371431475670435200267720ustar00rootroot00000000000000from __future__ import annotations import os import subprocess import sys from pathlib import Path from typing import TYPE_CHECKING from unittest import mock import pytest import shellingham import tomlkit from flatdict import FlatDict from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.main import main from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_successful_exit_code, get_func_qual_name if TYPE_CHECKING: from click.testing import CliRunner from tests.fixtures.example_project import GetWheelFileFn, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult @pytest.mark.skipif(sys.platform == "win32", reason="Unix only") @pytest.mark.parametrize( "shell", filter( None, [ # because we will actually run the build command in this shell, we must ensure it exists "bash" if list( filter( lambda sh_exe: Path(sh_exe).exists(), ("/bin/bash", "/usr/bin/bash", "/usr/local/bin/bash"), ) ) else "", "zsh" if list( filter( lambda sh_exe: Path(sh_exe).exists(), ("/bin/zsh", "/usr/bin/zsh", "/usr/local/bin/zsh"), ) ) else "", ], ) or ["sh"], ) @pytest.mark.parametrize( "repo_result, cli_args, 
next_release_version", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), ["--patch"], "0.1.2", ) ], ) def test_version_runs_build_command( repo_result: BuiltRepoResult, cli_args: list[str], next_release_version: str, cli_runner: CliRunner, shell: str, get_wheel_file: GetWheelFileFn, example_pyproject_toml: Path, mocked_git_push: mock.MagicMock, post_mocker: mock.Mock, ): # Setup built_wheel_file = get_wheel_file(next_release_version) pyproject_config = FlatDict( tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")), delimiter=".", ) build_command = pyproject_config.get("tool.semantic_release.build_command", "") patched_os_environment = { "CI": "true", "PATH": os.getenv("PATH", ""), "HOME": "/home/username", "VIRTUAL_ENV": "./.venv", # Simulate that all CI's are set "GITHUB_ACTIONS": "true", "GITLAB_CI": "true", "GITEA_ACTIONS": "true", "BITBUCKET_REPO_FULL_NAME": "python-semantic-release/python-semantic-release.git", "PSR_DOCKER_GITHUB_ACTION": "true", } # Wrap subprocess.run to capture the arguments to the call with mock.patch( get_func_qual_name(subprocess.run), wraps=subprocess.run, ) as patched_subprocess_run, mock.patch( get_func_qual_name(shellingham.detect_shell), return_value=(shell, shell) ), mock.patch.dict(os.environ, patched_os_environment, clear=True): # ACT: run & force a new version that will trigger the build command cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *cli_args] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) patched_subprocess_run.assert_called_with( [shell, "-c", build_command], check=True, env={ "NEW_VERSION": next_release_version, # injected into environment "CI": patched_os_environment["CI"], "BITBUCKET_CI": "true", # Converted "GITHUB_ACTIONS": patched_os_environment["GITHUB_ACTIONS"], "GITEA_ACTIONS": patched_os_environment["GITEA_ACTIONS"], "GITLAB_CI": patched_os_environment["GITLAB_CI"], "HOME": patched_os_environment["HOME"], "PATH": 
            patched_os_environment["PATH"],
            "VIRTUAL_ENV": patched_os_environment["VIRTUAL_ENV"],
            "PSR_DOCKER_GITHUB_ACTION": patched_os_environment[
                "PSR_DOCKER_GITHUB_ACTION"
            ],
        },
    )
    assert built_wheel_file.exists()
    assert mocked_git_push.call_count == 2
    assert post_mocker.call_count == 1


@pytest.mark.skipif(sys.platform != "win32", reason="Windows only")
@pytest.mark.parametrize("shell", ("powershell", "pwsh", "cmd"))
@pytest.mark.parametrize(
    "repo_result, cli_args, next_release_version",
    [
        (
            lazy_fixture(repo_w_trunk_only_conventional_commits.__name__),
            ["--patch"],
            "0.1.2",
        )
    ],
)
def test_version_runs_build_command_windows(
    repo_result: BuiltRepoResult,
    cli_args: list[str],
    next_release_version: str,
    cli_runner: CliRunner,
    shell: str,
    get_wheel_file: GetWheelFileFn,
    example_pyproject_toml: Path,
    update_pyproject_toml: UpdatePyprojectTomlFn,
    mocked_git_push: mock.MagicMock,
    post_mocker: mock.Mock,
):
    """
    Windows variant of the build-command test: verifies the build command is executed
    via ``cmd /c`` or ``powershell/pwsh -Command`` and that the expected Windows
    environment variables are passed through to the subprocess.
    """
    if shell == "cmd":
        # cmd.exe cannot run the default (bash-flavored) build command, so rewrite it
        # to cmd syntax; %NEW_VERSION% is expanded by cmd from the injected env var.
        build_result_file = get_wheel_file("%NEW_VERSION%")
        update_pyproject_toml(
            "tool.semantic_release.build_command",
            str.join(
                " && ",
                [
                    f"mkdir {build_result_file.parent}",
                    f"type nul > {build_result_file}",
                    f"echo 'Built distribution: {build_result_file}'",
                ],
            ),
        )

    # Setup
    built_wheel_file = get_wheel_file(next_release_version)
    # Read the (possibly rewritten) build command back from pyproject.toml
    pyproject_config = FlatDict(
        tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")),
        delimiter=".",
    )
    build_command = pyproject_config.get("tool.semantic_release.build_command", "")
    patched_os_environment = {
        "CI": "true",
        "PATH": os.getenv("PATH", ""),
        "HOME": "/home/username",
        "VIRTUAL_ENV": "./.venv",
        # Simulate that all CI's are set
        "GITHUB_ACTIONS": "true",
        "GITLAB_CI": "true",
        "GITEA_ACTIONS": "true",
        "BITBUCKET_REPO_FULL_NAME": "python-semantic-release/python-semantic-release.git",
        "PSR_DOCKER_GITHUB_ACTION": "true",
        # Windows
        "ALLUSERSAPPDATA": "C:\\ProgramData",
        "ALLUSERSPROFILE": "C:\\ProgramData",
        "APPDATA": "C:\\Users\\Username\\AppData\\Roaming",
        "COMMONPROGRAMFILES": "C:\\Program Files\\Common Files",
        "COMMONPROGRAMFILES(X86)": "C:\\Program Files (x86)\\Common Files",
        "DEFAULTUSERPROFILE": "C:\\Users\\Default",
        "HOMEPATH": "\\Users\\Username",
        "PATHEXT": ".COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC",
        "PROFILESFOLDER": "C:\\Users",
        "PROGRAMFILES": "C:\\Program Files",
        "PROGRAMFILES(X86)": "C:\\Program Files (x86)",
        "SYSTEM": "C:\\Windows\\System32",
        "SYSTEM16": "C:\\Windows\\System16",
        "SYSTEM32": "C:\\Windows\\System32",
        "SYSTEMDRIVE": "C:",
        "SYSTEMROOT": "C:\\Windows",
        "TEMP": "C:\\Users\\Username\\AppData\\Local\\Temp",
        "TMP": "C:\\Users\\Username\\AppData\\Local\\Temp",
        "USERPROFILE": "C:\\Users\\Username",
        "USERSID": "S-1-5-21-1234567890-123456789-123456789-1234",
        "USERNAME": "Username",  # must include for python getpass.getuser() on windows
        "WINDIR": "C:\\Windows",
    }

    # Wrap subprocess.run to capture the arguments to the call
    with mock.patch(
        get_func_qual_name(subprocess.run),
        wraps=subprocess.run,
    ) as patched_subprocess_run, mock.patch(
        get_func_qual_name(shellingham.detect_shell), return_value=(shell, shell)
    ), mock.patch.dict(os.environ, patched_os_environment, clear=True):
        # ACT: run & force a new version that will trigger the build command
        cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *cli_args]
        result = cli_runner.invoke(main, cli_cmd[1:])

    # Evaluate
    assert_successful_exit_code(result, cli_cmd)
    patched_subprocess_run.assert_called_once_with(
        [shell, "/c" if shell == "cmd" else "-Command", build_command],
        check=True,
        env={
            "NEW_VERSION": next_release_version,  # injected into environment
            "CI": patched_os_environment["CI"],
            "BITBUCKET_CI": "true",  # Converted
            "GITHUB_ACTIONS": patched_os_environment["GITHUB_ACTIONS"],
            "GITEA_ACTIONS": patched_os_environment["GITEA_ACTIONS"],
            "GITLAB_CI": patched_os_environment["GITLAB_CI"],
            "HOME": patched_os_environment["HOME"],
            "PATH": patched_os_environment["PATH"],
            "VIRTUAL_ENV": patched_os_environment["VIRTUAL_ENV"],
            "PSR_DOCKER_GITHUB_ACTION": patched_os_environment[
                "PSR_DOCKER_GITHUB_ACTION"
            ],
            # Windows
            # NOTE: "USERNAME" is set in the patched environment above but is not
            # expected here, i.e. it is not part of the passthrough set.
            "ALLUSERSAPPDATA": patched_os_environment["ALLUSERSAPPDATA"],
            "ALLUSERSPROFILE": patched_os_environment["ALLUSERSPROFILE"],
            "APPDATA": patched_os_environment["APPDATA"],
            "COMMONPROGRAMFILES": patched_os_environment["COMMONPROGRAMFILES"],
            "COMMONPROGRAMFILES(X86)": patched_os_environment[
                "COMMONPROGRAMFILES(X86)"
            ],
            "DEFAULTUSERPROFILE": patched_os_environment["DEFAULTUSERPROFILE"],
            "HOMEPATH": patched_os_environment["HOMEPATH"],
            "PATHEXT": patched_os_environment["PATHEXT"],
            "PROFILESFOLDER": patched_os_environment["PROFILESFOLDER"],
            "PROGRAMFILES": patched_os_environment["PROGRAMFILES"],
            "PROGRAMFILES(X86)": patched_os_environment["PROGRAMFILES(X86)"],
            "SYSTEM": patched_os_environment["SYSTEM"],
            "SYSTEM16": patched_os_environment["SYSTEM16"],
            "SYSTEM32": patched_os_environment["SYSTEM32"],
            "SYSTEMDRIVE": patched_os_environment["SYSTEMDRIVE"],
            "SYSTEMROOT": patched_os_environment["SYSTEMROOT"],
            "TEMP": patched_os_environment["TEMP"],
            "TMP": patched_os_environment["TMP"],
            "USERPROFILE": patched_os_environment["USERPROFILE"],
            "USERSID": patched_os_environment["USERSID"],
            "WINDIR": patched_os_environment["WINDIR"],
        },
    )

    dist_file_exists = built_wheel_file.exists()
    assert dist_file_exists, f"\n Expected wheel file to be created at {built_wheel_file}, but it does not exist."

    assert mocked_git_push.call_count == 2
    assert post_mocker.call_count == 1


@pytest.mark.parametrize(
    "repo_result, cli_args, next_release_version",
    [
        (
            lazy_fixture(repo_w_trunk_only_conventional_commits.__name__),
            ["--patch"],
            "0.1.2",
        )
    ],
)
def test_version_runs_build_command_w_user_env(
    repo_result: BuiltRepoResult,
    cli_args: list[str],
    next_release_version: str,
    cli_runner: CliRunner,
    example_pyproject_toml: Path,
    update_pyproject_toml: UpdatePyprojectTomlFn,
):
    """
    Verify that user-configured ``build_command_env`` entries are parsed
    (whitespace-stripped, split on the first ``=`` only) and merged into the
    environment handed to the build subprocess.
    """
    # Setup
    patched_os_environment = {
        "CI": "true",
        "PATH": os.getenv("PATH", ""),
        "HOME": "/home/username",
        "VIRTUAL_ENV": "./.venv",
        # Windows
        "USERNAME": "Username",  # must include for python getpass.getuser() on windows
        # Simulate that all CI's are set
        "GITHUB_ACTIONS": "true",
        "GITLAB_CI": "true",
        "GITEA_ACTIONS": "true",
        "BITBUCKET_REPO_FULL_NAME": "python-semantic-release/python-semantic-release.git",
        "PSR_DOCKER_GITHUB_ACTION": "true",
        # User environment variables (varying passthrough results)
        "MY_CUSTOM_VARIABLE": "custom",
        "IGNORED_VARIABLE": "ignore_me",
        "OVERWRITTEN_VAR": "initial",
        "SET_AS_EMPTY_VAR": "not_empty",
    }
    pyproject_config = FlatDict(
        tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")),
        delimiter=".",
    )
    build_command = pyproject_config.get("tool.semantic_release.build_command", "")
    update_pyproject_toml(
        "tool.semantic_release.build_command_env",
        [
            # Includes arbitrary whitespace which will be removed
            " MY_CUSTOM_VARIABLE ",  # detect and pass from environment
            " OVERWRITTEN_VAR = overrided",  # pass hardcoded value which overrides environment
            " SET_AS_EMPTY_VAR = ",  # keep variable initialized but as empty string
            " HARDCODED_VAR=hardcoded ",  # pass hardcoded value that doesn't override anything
            "VAR_W_EQUALS = a-var===condition",  # only splits on 1st equals sign
            "=ignored-invalid-named-var",  # TODO: validation error instead, but currently just ignore
        ],
    )

    # Mock out subprocess.run
    with mock.patch(
        get_func_qual_name(subprocess.run),
        # Stub out the build subprocess entirely — only the call arguments matter here
        return_value=subprocess.CompletedProcess(args=(), returncode=0),
    ) as patched_subprocess_run, mock.patch(
        get_func_qual_name(shellingham.detect_shell),
        return_value=("bash", "/usr/bin/bash"),
    ), mock.patch.dict(os.environ, patched_os_environment, clear=True):
        # Disable all VCS side effects so only the build command execution is exercised
        cli_cmd = [
            MAIN_PROG_NAME,
            VERSION_SUBCMD,
            *cli_args,
            "--no-commit",
            "--no-tag",
            "--no-changelog",
            "--no-push",
        ]

        # ACT: run & force a new version that will trigger the build command
        result = cli_runner.invoke(main, cli_cmd[1:])

    # Evaluate
    # [1] Make sure it did not error internally
    assert_successful_exit_code(result, cli_cmd)

    # [2] Make sure the subprocess was called with the correct environment
    patched_subprocess_run.assert_called_once_with(
        ["bash", "-c", build_command],
        check=True,
        env={
            "NEW_VERSION": next_release_version,  # injected into environment
            "CI": patched_os_environment["CI"],
            "BITBUCKET_CI": "true",  # Converted
            "GITHUB_ACTIONS": patched_os_environment["GITHUB_ACTIONS"],
            "GITEA_ACTIONS": patched_os_environment["GITEA_ACTIONS"],
            "GITLAB_CI": patched_os_environment["GITLAB_CI"],
            "HOME": patched_os_environment["HOME"],
            "PATH": patched_os_environment["PATH"],
            "VIRTUAL_ENV": patched_os_environment["VIRTUAL_ENV"],
            "PSR_DOCKER_GITHUB_ACTION": patched_os_environment[
                "PSR_DOCKER_GITHUB_ACTION"
            ],
            "MY_CUSTOM_VARIABLE": patched_os_environment["MY_CUSTOM_VARIABLE"],
            "OVERWRITTEN_VAR": "overrided",
            "SET_AS_EMPTY_VAR": "",
            "HARDCODED_VAR": "hardcoded",
            # Note that IGNORED_VARIABLE is not here.
            "VAR_W_EQUALS": "a-var===condition",
        },
    )


@pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__)
def test_version_skips_build_command_with_skip_build(
    cli_runner: CliRunner,
    mocked_git_push: mock.MagicMock,
    post_mocker: mock.Mock,
):
    """
    Verify that ``--skip-build`` suppresses the build command entirely while the
    rest of the release (commit+tag push, VCS release) still happens.
    """
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--patch", "--skip-build"]

    with mock.patch(
        get_func_qual_name(subprocess.run),
        return_value=subprocess.CompletedProcess(args=(), returncode=0),
    ) as patched_subprocess_run:
        # Act: force a new version
        result = cli_runner.invoke(main, cli_cmd[1:])

    # Evaluate
    assert_successful_exit_code(result, cli_cmd)
    patched_subprocess_run.assert_not_called()

    assert mocked_git_push.call_count == 2
    assert post_mocker.call_count == 1
# NOTE(review): the following line is residue of a tar archive member boundary
# (ustar header) — everything after it belongs to
# tests/e2e/cmd_version/test_version_bump.py; this dump should be re-extracted.
python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_bump.py000066400000000000000000004105401475670435200266320ustar00rootroot00000000000000from __future__ import annotations

from datetime import timedelta
from itertools import count
from pathlib import Path
from typing import TYPE_CHECKING

import pytest
import tomlkit

# Limitation in pytest-lazy-fixture - see https://stackoverflow.com/a/69884019
from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture

from semantic_release.cli.commands.main import main
from semantic_release.commit_parser.conventional import ConventionalCommitParser
from semantic_release.commit_parser.emoji import EmojiCommitParser
from semantic_release.commit_parser.scipy import ScipyCommitParser

from tests.const import EXAMPLE_PROJECT_NAME, MAIN_PROG_NAME, VERSION_SUBCMD
from tests.fixtures import (
    conventional_chore_commits,
    conventional_major_commits,
    conventional_minor_commits,
    conventional_patch_commits,
    emoji_chore_commits,
    emoji_major_commits,
    emoji_minor_commits,
    emoji_patch_commits,
    repo_w_git_flow_w_alpha_prereleases_n_conventional_commits,
    repo_w_git_flow_w_alpha_prereleases_n_emoji_commits,
    repo_w_git_flow_w_alpha_prereleases_n_scipy_commits,
    repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits,
    repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits,
    repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits,
    repo_w_github_flow_w_feature_release_channel_conventional_commits,
    repo_w_initial_commit,
    repo_w_no_tags_conventional_commits,
    repo_w_no_tags_emoji_commits,
    repo_w_no_tags_scipy_commits,
    repo_w_trunk_only_conventional_commits,
    repo_w_trunk_only_emoji_commits,
    repo_w_trunk_only_n_prereleases_conventional_commits,
    repo_w_trunk_only_n_prereleases_emoji_commits,
    repo_w_trunk_only_n_prereleases_scipy_commits,
    repo_w_trunk_only_scipy_commits,
    scipy_chore_commits,
    scipy_major_commits,
    scipy_minor_commits,
    scipy_patch_commits,
)
from tests.util import (
    add_text_to_file,
    assert_successful_exit_code,
    dynamic_python_import,
    xdist_sort_hack,
)

if TYPE_CHECKING:
    from unittest.mock import MagicMock

    from click.testing import CliRunner
    from requests_mock import Mocker

    from tests.conftest import GetStableDateNowFn
    from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn
    from tests.fixtures.git_repo import BuiltRepoResult


@pytest.mark.parametrize(
    "repo_result, cli_args, next_release_version",
    [
        *(
            (
                lazy_fixture(repo_w_no_tags_conventional_commits.__name__),
                cli_args,
                next_release_version,
            )
            for cli_args, next_release_version in (
                # New build-metadata forces a new release
                (["--build-metadata", "build.12345"], "0.1.0+build.12345"),
                # Forced version bump
                (["--prerelease"], "0.0.0-rc.1"),
                (["--patch"], "0.0.1"),
                (["--minor"], "0.1.0"),
                (["--major"], "1.0.0"),
                # Forced version bump with --build-metadata
                (["--patch", "--build-metadata", "build.12345"], "0.0.1+build.12345"),
                # Forced version bump with --as-prerelease
                (["--prerelease", "--as-prerelease"], "0.0.0-rc.1"),
                (["--patch", "--as-prerelease"], "0.0.1-rc.1"),
                (["--minor", "--as-prerelease"], "0.1.0-rc.1"),
                (["--major", "--as-prerelease"], "1.0.0-rc.1"),
                # Forced version bump with --as-prerelease and modified --prerelease-token
                (
                    ["--patch", "--as-prerelease", "--prerelease-token", "beta"],
                    "0.0.1-beta.1",
                ),
                # Forced version bump with --as-prerelease and modified --prerelease-token
                # and --build-metadata
                (
                    [
                        "--patch",
                        "--as-prerelease",
                        "--prerelease-token",
                        "beta",
                        "--build-metadata",
                        "build.12345",
                    ],
                    "0.0.1-beta.1+build.12345",
                ),
            )
        ),
        *[
            pytest.param(
                lazy_fixture(repo_fixture_name),
                cli_args,
                expected_stdout,
                marks=pytest.mark.comprehensive,
            )
            for repo_fixture_name, values in {
                repo_w_trunk_only_conventional_commits.__name__: [
                    # New build-metadata forces a new release
                    (["--build-metadata", "build.12345"], "0.1.1+build.12345"),
                    # Forced version bump
                    (["--prerelease"], "0.1.1-rc.1"),
                    (["--patch"], "0.1.2"),
                    (["--minor"], "0.2.0"),
                    (["--major"], "1.0.0"),
                    # Forced version bump with --build-metadata
                    (
                        ["--patch", "--build-metadata", "build.12345"],
                        "0.1.2+build.12345",
                    ),
                    # Forced version bump with --as-prerelease
                    (["--prerelease", "--as-prerelease"], "0.1.1-rc.1"),
                    (["--patch", "--as-prerelease"], "0.1.2-rc.1"),
                    (["--minor", "--as-prerelease"], "0.2.0-rc.1"),
                    (["--major", "--as-prerelease"], "1.0.0-rc.1"),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    (
                        ["--patch", "--as-prerelease", "--prerelease-token", "beta"],
                        "0.1.2-beta.1",
                    ),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    # and --build-metadata
                    (
                        [
                            "--patch",
                            "--as-prerelease",
                            "--prerelease-token",
                            "beta",
                            "--build-metadata",
                            "build.12345",
                        ],
                        "0.1.2-beta.1+build.12345",
                    ),
                ],
                repo_w_trunk_only_n_prereleases_conventional_commits.__name__: [
                    # New build-metadata forces a new release
                    (["--build-metadata", "build.12345"], "0.2.0+build.12345"),
                    # Forced version bump
                    # NOTE: There is already a 0.2.0-rc.1
                    (["--prerelease"], "0.2.0-rc.2"),
                    (["--patch"], "0.2.1"),
                    (["--minor"], "0.3.0"),
                    (["--major"], "1.0.0"),
                    # Forced version bump with --build-metadata
                    (
                        ["--patch", "--build-metadata", "build.12345"],
                        "0.2.1+build.12345",
                    ),
                    # Forced version bump with --as-prerelease
                    (["--prerelease", "--as-prerelease"], "0.2.0-rc.2"),
                    (["--patch", "--as-prerelease"], "0.2.1-rc.1"),
                    (["--minor", "--as-prerelease"], "0.3.0-rc.1"),
                    (["--major", "--as-prerelease"], "1.0.0-rc.1"),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    (
                        ["--patch", "--as-prerelease", "--prerelease-token", "beta"],
                        "0.2.1-beta.1",
                    ),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    # and --build-metadata
                    (
                        [
                            "--patch",
                            "--as-prerelease",
                            "--prerelease-token",
                            "beta",
                            "--build-metadata",
                            "build.12345",
                        ],
                        "0.2.1-beta.1+build.12345",
                    ),
                ],
                repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__: [
                    # New build-metadata forces a new release
                    (["--build-metadata", "build.12345"], "1.1.0+build.12345"),
                    # Forced version bump
                    (["--prerelease"], "1.1.0-rc.1"),
                    (["--patch"], "1.1.1"),
                    (["--minor"], "1.2.0"),
                    (["--major"], "2.0.0"),
                    # Forced version bump with --build-metadata
                    (
                        ["--patch", "--build-metadata", "build.12345"],
                        "1.1.1+build.12345",
                    ),
                    # Forced version bump with --as-prerelease
                    (["--prerelease", "--as-prerelease"], "1.1.0-rc.1"),
                    (["--patch", "--as-prerelease"], "1.1.1-rc.1"),
                    (["--minor", "--as-prerelease"], "1.2.0-rc.1"),
                    (["--major", "--as-prerelease"], "2.0.0-rc.1"),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    (
                        ["--patch", "--as-prerelease", "--prerelease-token", "beta"],
                        "1.1.1-beta.1",
                    ),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    # and --build-metadata
                    (
                        [
                            "--patch",
                            "--as-prerelease",
                            "--prerelease-token",
                            "beta",
                            "--build-metadata",
                            "build.12345",
                        ],
                        "1.1.1-beta.1+build.12345",
                    ),
                ],
                repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__: [
                    # New build-metadata forces a new release
                    (["--build-metadata", "build.12345"], "1.2.0-alpha.2+build.12345"),
                    # Forced version bump
                    (["--prerelease"], "1.2.0-alpha.3"),
                    (["--patch"], "1.2.1"),
                    (["--minor"], "1.3.0"),
                    (["--major"], "2.0.0"),
                    # Forced version bump with --build-metadata
                    (
                        ["--patch", "--build-metadata", "build.12345"],
                        "1.2.1+build.12345",
                    ),
                    # Forced version bump with --as-prerelease
                    (["--prerelease", "--as-prerelease"], "1.2.0-alpha.3"),
                    (["--patch", "--as-prerelease"], "1.2.1-alpha.1"),
                    (["--minor", "--as-prerelease"], "1.3.0-alpha.1"),
                    (["--major", "--as-prerelease"], "2.0.0-alpha.1"),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    (
                        ["--patch", "--as-prerelease", "--prerelease-token", "beta"],
                        "1.2.1-beta.1",
                    ),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    # and --build-metadata
                    (
                        [
                            "--patch",
                            "--as-prerelease",
                            "--prerelease-token",
                            "beta",
                            "--build-metadata",
                            "build.12345",
                        ],
                        "1.2.1-beta.1+build.12345",
                    ),
                ],
                repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__: [
                    # New build-metadata forces a new release
                    (["--build-metadata", "build.12345"], "1.1.0+build.12345"),
                    # Forced version bump
                    (["--prerelease"], "1.1.0-rc.3"),
                    (["--patch"], "1.1.1"),
                    (["--minor"], "1.2.0"),
                    (["--major"], "2.0.0"),
                    # Forced version bump with --build-metadata
                    (
                        ["--patch", "--build-metadata", "build.12345"],
                        "1.1.1+build.12345",
                    ),
                    # Forced version bump with --as-prerelease
                    (["--prerelease", "--as-prerelease"], "1.1.0-rc.3"),
                    (["--patch", "--as-prerelease"], "1.1.1-rc.1"),
                    (["--minor", "--as-prerelease"], "1.2.0-rc.1"),
                    (["--major", "--as-prerelease"], "2.0.0-rc.1"),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    (
                        ["--patch", "--as-prerelease", "--prerelease-token", "beta"],
                        "1.1.1-beta.1",
                    ),
                    # Forced version bump with --as-prerelease and modified --prerelease-token
                    # and --build-metadata
                    (
                        [
                            "--patch",
                            "--as-prerelease",
                            "--prerelease-token",
                            "beta",
                            "--build-metadata",
                            "build.12345",
                        ],
                        "1.1.1-beta.1+build.12345",
                    ),
                ],
            }.items()
            for (cli_args, expected_stdout) in values
        ],
    ],
)
def test_version_force_level(
    repo_result: BuiltRepoResult,
    cli_args: list[str],
    next_release_version: str,
    example_project_dir: ExProjectDir,
    example_pyproject_toml: Path,
    cli_runner: CliRunner,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
):
    """
    Verify that forcing a bump level (--patch/--minor/--major/--prerelease, optionally
    with --as-prerelease/--prerelease-token/--build-metadata) produces the expected
    version, commit, tag, pushes, and version-stamped files.
    """
    repo = repo_result["repo"]
    version_file = example_project_dir.joinpath(
        "src", EXAMPLE_PROJECT_NAME, "_version.py"
    )
    expected_changed_files = sorted(
        [
            "CHANGELOG.md",
            "pyproject.toml",
            str(version_file.relative_to(example_project_dir)),
        ]
    )

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}
    version_py_before = dynamic_python_import(
        version_file, f"{EXAMPLE_PROJECT_NAME}._version"
    ).__version__
    pyproject_toml_before = tomlkit.loads(
        example_pyproject_toml.read_text(encoding="utf-8")
    )

    # Modify the pyproject.toml to remove the version so we can compare it later
    pyproject_toml_before.get("tool", {}).get("poetry").pop("version")  # type: ignore[attr-defined]

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *cli_args]
    result = cli_runner.invoke(main, cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = set.difference(tags_after, tags_before)
    differing_files = [
        # Make sure filepath uses os specific path separators
        str(Path(file))
        for file in str(repo.git.diff("HEAD", "HEAD~1", name_only=True)).splitlines()
    ]
    pyproject_toml_after = tomlkit.loads(
        example_pyproject_toml.read_text(encoding="utf-8")
    )
    pyproj_version_after = (
        pyproject_toml_after.get("tool", {}).get("poetry", {}).pop("version")
    )

    # Load python module for reading the version (ensures the file is valid)
    version_py_after = dynamic_python_import(
        version_file, f"{EXAMPLE_PROJECT_NAME}._version"
    ).__version__

    # Evaluate (normal release actions should have occurred for the forced bump)
    assert_successful_exit_code(result, cli_cmd)

    # A commit has been made
    assert [head_sha_before] == [head.hexsha for head in head_after.parents]
    assert len(tags_set_difference) == 1  # A tag has been created
    assert f"v{next_release_version}" in tags_set_difference

    assert mocked_git_push.call_count == 2  # 1 for commit, 1 for tag
    assert post_mocker.call_count == 1  # vcs release creation occurred

    # Changelog already reflects changes this should introduce
    assert expected_changed_files == differing_files

    # Compare pyproject.toml
    assert pyproject_toml_before == pyproject_toml_after
    assert next_release_version == pyproj_version_after

    # Compare _version.py
    assert next_release_version == version_py_after
    assert version_py_before != version_py_after


# NOTE: There is a bit of a corner-case where if we are not doing a
# prerelease, we will get a full version based on already-released commits.
# So for example, commits that wouldn't trigger a release on a prerelease branch
# won't trigger a release if prerelease=true; however, when commits included in a
# prerelease branch are merged to a release branch, prerelease=False - so a feat commit
# which previously triggered a prerelease on a branch will subsequently trigger a full
# release when merged to a full release branch where prerelease=False.
# For this reason a couple of these test cases predict a new version even when the
# commits being added here don't induce a version bump.
@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            (
                # Default case should be a minor bump since last full release was 1.1.1
                # last tag is a prerelease 1.2.0-rc.2
                lazy_fixture(
                    repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__
                ),
                lazy_fixture(conventional_minor_commits.__name__),
                False,
                "alpha",
                "1.2.0",
                "main",
            ),
            *[
                pytest.param(
                    lazy_fixture(repo_fixture_name),
                    [] if commit_messages is None else lazy_fixture(commit_messages),
                    prerelease,
                    prerelease_token,
                    expected_new_version,
                    "main" if branch_name is None else branch_name,
                    marks=pytest.mark.comprehensive,
                )
                for (repo_fixture_name, prerelease_token), values in {
                    # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2
                    # The last full release version was 1.1.1, so it's had a minor
                    # prerelease
                    (
                        repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__,
                        "alpha",
                    ): [
                        (conventional_patch_commits.__name__, False, "1.1.2", None),
                        (
                            conventional_patch_commits.__name__,
                            True,
                            "1.1.2-alpha.1",
                            None,
                        ),
                        (
                            conventional_minor_commits.__name__,
                            True,
                            "1.2.0-alpha.3",
                            "feat/feature-4",  # branch
                        ),
                        (conventional_major_commits.__name__, False, "2.0.0", None),
                        (
                            conventional_major_commits.__name__,
                            True,
                            "2.0.0-alpha.1",
                            None,
                        ),
                    ],
                    # Latest version for repo_with_git_flow_and_release_channels is
                    # currently 1.1.0
                    (
                        repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__,
                        "alpha",
                    ): [
                        (conventional_patch_commits.__name__, False, "1.1.1", None),
                        (
                            conventional_patch_commits.__name__,
                            True,
                            "1.1.1-alpha.1",
                            None,
                        ),
                        (conventional_minor_commits.__name__, False, "1.2.0", None),
                        (
                            conventional_minor_commits.__name__,
                            True,
                            "1.2.0-alpha.1",
                            None,
                        ),
                        (conventional_major_commits.__name__, False, "2.0.0", None),
                        (
                            conventional_major_commits.__name__,
                            True,
                            "2.0.0-alpha.1",
                            None,
                        ),
                    ],
                }.items()
                for (
                    commit_messages,
                    prerelease,
                    expected_new_version,
                    branch_name,
                ) in values  # type: ignore[attr-defined]
            ],
        ]
    ),
)
# TODO: add a github flow test case
def test_version_next_greater_than_version_one_conventional(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    cli_runner: CliRunner,
    file_in_repo: str,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Verify that conventional commits applied on top of a >=1.0.0 repo produce the
    expected next version (full or prerelease) along with a release commit, tag,
    pushes, and VCS release.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: apply commits to the repo
    stable_now_datetime = stable_now_date()
    # Monotonically increasing timestamps keep commit ordering deterministic
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )
    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = cli_runner.invoke(main, cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = set.difference(tags_after, tags_before)

    # Evaluate (normal release actions should have occurred for the version bump)
    assert_successful_exit_code(result, cli_cmd)

    # A commit has been made (regardless of precommit)
    assert [head_sha_before] == [head.hexsha for head in head_after.parents]
    assert len(tags_set_difference) == 1  # A tag has been created
    assert f"v{next_release_version}" in tags_set_difference
    assert
mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, "alpha", ): [ *( (commits, True, "1.2.0-alpha.2", "feat/feature-4") for commits in ( None, conventional_chore_commits.__name__, ) ), *( (commits, False, "1.1.1", None) for commits in ( None, conventional_chore_commits.__name__, ) ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, "alpha", ): [ *( (commits, prerelease, "1.1.0", None) for prerelease in (True, False) for commits in ( None, conventional_chore_commits.__name__, ) ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ], ] ), ) def test_version_next_greater_than_version_one_no_bump_conventional( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, cli_runner: CliRunner, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: 
repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 
                # 1.2.0-alpha.2
                # The last full release version was 1.1.1, so it's had a minor
                # prerelease
                (
                    repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__,
                    "alpha",
                ): [
                    (emoji_patch_commits.__name__, False, "1.1.2", None),
                    (
                        emoji_patch_commits.__name__,
                        True,
                        "1.1.2-alpha.1",
                        None,
                    ),
                    (
                        emoji_minor_commits.__name__,
                        False,
                        "1.2.0",
                        None,
                    ),
                    (
                        emoji_minor_commits.__name__,
                        True,
                        "1.2.0-alpha.3",
                        "feat/feature-4",  # branch
                    ),
                    (emoji_major_commits.__name__, False, "2.0.0", None),
                    (
                        emoji_major_commits.__name__,
                        True,
                        "2.0.0-alpha.1",
                        None,
                    ),
                ],
                # Latest version for repo_with_git_flow_and_release_channels is
                # currently 1.1.0
                (
                    repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__,
                    "alpha",
                ): [
                    (emoji_patch_commits.__name__, False, "1.1.1", None),
                    (
                        emoji_patch_commits.__name__,
                        True,
                        "1.1.1-alpha.1",
                        None,
                    ),
                    (emoji_minor_commits.__name__, False, "1.2.0", None),
                    (
                        emoji_minor_commits.__name__,
                        True,
                        "1.2.0-alpha.1",
                        None,
                    ),
                    (emoji_major_commits.__name__, False, "2.0.0", None),
                    (
                        emoji_major_commits.__name__,
                        True,
                        "2.0.0-alpha.1",
                        None,
                    ),
                ],
            }.items()
            for (
                commit_messages,
                prerelease,
                expected_new_version,
                branch_name,
            ) in values  # type: ignore[attr-defined]
        ]
    ),
)
def test_version_next_greater_than_version_one_emoji(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    cli_runner: CliRunner,
    file_in_repo: str,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Emoji-parser counterpart of the conventional >=1.0.0 bump test: verifies the
    expected next version plus release commit, tag, pushes, and VCS release.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: apply commits to the repo
    stable_now_datetime = stable_now_date()
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )
    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = cli_runner.invoke(main, cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = set.difference(tags_after, tags_before)

    # Evaluate (normal release actions should have occurred for the version bump)
    assert_successful_exit_code(result, cli_cmd)

    # A commit has been made (regardless of precommit)
    assert [head_sha_before] == [head.hexsha for head in head_after.parents]
    assert len(tags_set_difference) == 1  # A tag has been created
    assert f"v{next_release_version}" in tags_set_difference
    assert mocked_git_push.call_count == 2  # 1 for commit, 1 for tag
    assert post_mocker.call_count == 1  # vcs release creation occurred


@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            *[
                pytest.param(
                    lazy_fixture(repo_fixture_name),
                    [] if commit_messages is None else lazy_fixture(commit_messages),
                    prerelease,
                    prerelease_token,
                    expected_new_version,
                    "main" if branch_name is None else branch_name,
                    marks=pytest.mark.comprehensive,
                )
                for (repo_fixture_name, prerelease_token), values in {
                    # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2
                    # The last full release version was 1.1.1, so it's had a minor
                    # prerelease
                    (
                        repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__,
                        "alpha",
                    ): [
                        *(
                            (commits, True, "1.2.0-alpha.2", "feat/feature-4")
                            for
                            commits in (
                                None,
                                emoji_chore_commits.__name__,
                            )
                        ),
                        *(
                            (commits, False, "1.1.1", None)
                            for commits in (
                                None,
                                emoji_chore_commits.__name__,
                            )
                        ),
                    ],
                    # Latest version for repo_with_git_flow_and_release_channels is
                    # currently 1.1.0
                    (
                        repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__,
                        "alpha",
                    ): [
                        *(
                            (commits, prerelease, "1.1.0", None)
                            for prerelease in (True, False)
                            for commits in (
                                None,
                                emoji_chore_commits.__name__,
                            )
                        ),
                    ],
                }.items()
                for (
                    commit_messages,
                    prerelease,
                    expected_new_version,
                    branch_name,
                ) in values  # type: ignore[attr-defined]
            ],
        ]
    ),
)
def test_version_next_greater_than_version_one_no_bump_emoji(
    repo_result: BuiltRepoResult,
    commit_messages: list[str],
    prerelease: bool,
    prerelease_token: str,
    next_release_version: str,
    branch_name: str,
    cli_runner: CliRunner,
    file_in_repo: str,
    mocked_git_push: MagicMock,
    post_mocker: Mocker,
    stable_now_date: GetStableDateNowFn,
):
    """
    Emoji-parser counterpart of the conventional no-bump test: no-release commits
    on a >=1.0.0 repo must produce NO commit, tag, push, or VCS release.
    """
    repo = repo_result["repo"]

    # setup: select the branch we desire for the next bump
    if repo.active_branch.name != branch_name:
        repo.heads[branch_name].checkout()

    # setup: apply commits to the repo
    stable_now_datetime = stable_now_date()
    commit_timestamp_gen = (
        (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds")
        for i in count(step=1)
    )
    for commit_message in commit_messages or []:
        add_text_to_file(repo, file_in_repo)
        repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen))

    # Setup: take measurement before running the version command
    head_sha_before = repo.head.commit.hexsha
    tags_before = {tag.name for tag in repo.tags}

    # Derive the cli arguments based on parameter input
    prerelease_args = list(
        filter(
            None,
            [
                "--as-prerelease" if prerelease else "",
                *(["--prerelease-token", prerelease_token] if prerelease else []),
            ],
        )
    )

    # Act
    cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args]
    result = cli_runner.invoke(main, cli_cmd[1:])

    # take measurement after running the version command
    head_after = repo.head.commit
    tags_after = {tag.name for tag in repo.tags}
    tags_set_difference = set.difference(tags_after, tags_before)

    # Evaluate (no release actions should have occurred when no bump)
    # (comment previously claimed "normal release actions should have occurred when
    # forced patch bump" — a copy-paste artifact; the assertions below verify the
    # opposite, matching the conventional no-bump test.)
    assert_successful_exit_code(result, cli_cmd)
    assert f"{next_release_version}\n" == result.stdout

    # No commit has been made
    assert head_sha_before == head_after.hexsha
    assert len(tags_set_difference) == 0  # No tag created
    assert mocked_git_push.call_count == 0  # no git push of tag or commit
    assert post_mocker.call_count == 0  # no vcs release


@pytest.mark.parametrize(
    str.join(
        ", ",
        [
            "repo_result",
            "commit_messages",
            "prerelease",
            "prerelease_token",
            "next_release_version",
            "branch_name",
        ],
    ),
    xdist_sort_hack(
        [
            pytest.param(
                lazy_fixture(repo_fixture_name),
                [] if commit_messages is None else lazy_fixture(commit_messages),
                prerelease,
                prerelease_token,
                expected_new_version,
                "main" if branch_name is None else branch_name,
                marks=pytest.mark.comprehensive,
            )
            for (repo_fixture_name, prerelease_token), values in {
                # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2
                # The last full release version was 1.1.1, so it's had a minor
                # prerelease
                (
                    repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__,
                    "alpha",
                ): [
                    (scipy_patch_commits.__name__, False, "1.1.2", None),
                    (
                        scipy_patch_commits.__name__,
                        True,
                        "1.1.2-alpha.1",
                        None,
                    ),
                    (
                        scipy_minor_commits.__name__,
                        False,
                        "1.2.0",
                        None,
                    ),
                    (
                        scipy_minor_commits.__name__,
                        True,
                        "1.2.0-alpha.3",
                        "feat/feature-4",  # branch
                    ),
                    (scipy_major_commits.__name__, False, "2.0.0", None),
                    (
                        scipy_major_commits.__name__,
                        True,
                        "2.0.0-alpha.1",
                        None,
                    ),
                ],
                # Latest version for repo_with_git_flow_and_release_channels is
                # currently 1.1.0
                (
                    repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__,
                    "alpha",
                ): [
                    (scipy_patch_commits.__name__, False, "1.1.1", None),
                    (
                        scipy_patch_commits.__name__,
                        True,
                        "1.1.1-alpha.1",
                        None,
                    ),
                    (scipy_minor_commits.__name__, False, "1.2.0", None),
                    (
                        scipy_minor_commits.__name__,
                        True,
                        "1.2.0-alpha.1",
                        None,
                    ),
(scipy_major_commits.__name__, False, "2.0.0", None), ( scipy_major_commits.__name__, True, "2.0.0-alpha.1", None, ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ], ), ) def test_version_next_greater_than_version_one_scipy( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, cli_runner: CliRunner, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in 
head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), [] if commit_messages is None else lazy_fixture(commit_messages), prerelease, prerelease_token, expected_new_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_git_flow is currently 1.2.0-alpha.2 # The last full release version was 1.1.1, so it's had a minor # prerelease ( repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, "alpha", ): [ *( (commits, True, "1.2.0-alpha.2", "feat/feature-4") for commits in ( None, scipy_chore_commits.__name__, ) ), *( (commits, False, "1.1.1", None) for commits in ( None, scipy_chore_commits.__name__, ) ), ], # Latest version for repo_with_git_flow_and_release_channels is # currently 1.1.0 ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, "alpha", ): [ *( (commits, prerelease, "1.1.0", None) for prerelease in (True, False) for commits in ( None, scipy_chore_commits.__name__, ) ), ], }.items() for ( commit_messages, prerelease, expected_new_version, branch_name, ) in values # type: ignore[attr-defined] ], ] ), ) def test_version_next_greater_than_version_one_no_bump_scipy( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, cli_runner: CliRunner, file_in_repo: str, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the 
branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release # ============================================================================= # # Zero Dot version tests (ex. 
0.x.y versions) # ============================================================================= # @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ ( # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a minor level bump lazy_fixture(repo_w_no_tags_conventional_commits.__name__), lazy_fixture(conventional_major_commits.__name__), False, "rc", False, True, "0.1.0", "main", ), *[ pytest.param( lazy_fixture(repo_fixture_name), commit_messages, prerelease, "rc" if prerelease_token is None else prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 ( repo_w_no_tags_conventional_commits.__name__, None, ): [ *( # when prerelease is False, & major_on_zero is False & # allow_zero_version is True, the version should be # 0.1.0, with the given commits (commits, False, False, True, "0.1.0", None) for commits in ( # Even when this test does not change anything, the base modification # will be a minor change and thus the version will be bumped to 0.1.0 None, # Non version bumping commits are absorbed into the previously detected minor bump lazy_fixture(conventional_chore_commits.__name__), # Patch commits are absorbed into the previously detected minor bump lazy_fixture(conventional_patch_commits.__name__), # Minor level commits are absorbed into the previously detected minor bump 
lazy_fixture(conventional_minor_commits.__name__), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a minor level bump # lazy_fixture(conventional_major_commits.__name__), # used as default ) ), # when prerelease is False, & major_on_zero is False, & allow_zero_version is True, # the version should only be minor bumped when provided major commits because # of the major_on_zero value ( lazy_fixture(conventional_major_commits.__name__), False, False, True, "0.1.0", None, ), # when prerelease is False, & major_on_zero is True & allow_zero_version is True, # the version should be major bumped when provided major commits because # of the major_on_zero value ( lazy_fixture(conventional_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is False, & allow_zero_version is False, the version should be # 1.0.0, across the board because 0 is not a valid major version. # major_on_zero is ignored as it is not relevant but tested for completeness (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch is currently 0.1.1 # Note repo_with_single_branch isn't modelled with prereleases ( repo_w_trunk_only_conventional_commits.__name__, None, ): [ *( # when prerelease must be False, and allow_zero_version is True, # the version is patch bumped because of the patch level commits # regardless of the major_on_zero value ( lazy_fixture(conventional_patch_commits.__name__), False, major_on_zero, True, "0.1.2", None, ) for major_on_zero in (True, False) ), *( # when prerelease must be False, and allow_zero_version is True, # the version is minor bumped because of the 
major_on_zero value=False (commits, False, False, True, "0.2.0", None) for commits in ( lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), # when prerelease must be False, and allow_zero_version is True, # but the major_on_zero is True, then when a major level commit is given, # the version should be bumped to the next major version ( lazy_fixture(conventional_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease must be False, & allow_zero_version is False, the version should be # 1.0.0, with any change regardless of major_on_zero (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch_and_prereleases is # currently 0.2.0 ( repo_w_trunk_only_n_prereleases_conventional_commits.__name__, None, ): [ # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped as a prerelease version, when given patch level commits ( lazy_fixture(conventional_patch_commits.__name__), True, False, True, "0.2.1-rc.1", None, ), # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped, when given patch level commits ( lazy_fixture(conventional_patch_commits.__name__), False, False, True, "0.2.1", None, ), *( # when allow_zero_version is True, # prerelease is True, & major_on_zero is False, the version should be # minor bumped as a prerelease version, when given commits of a minor or major level (commits, True, False, True, "0.3.0-rc.1", None) for commits in ( lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), *( # when 
allow_zero_version is True, prerelease is True, & major_on_zero # is False, the version should be minor bumped, when given commits of a # minor or major level because major_on_zero = False (commits, False, False, True, "0.3.0", None) for commits in ( lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), # when prerelease is True, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0 as a prerelease version, when # given major level commits ( lazy_fixture(conventional_major_commits.__name__), True, True, True, "1.0.0-rc.1", None, ), # when prerelease is False, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0, when given major level commits ( lazy_fixture(conventional_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0 as a prerelease version, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, True, major_on_zero, False, "1.0.0-rc.1", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( lazy_fixture(conventional_patch_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), lazy_fixture(conventional_major_commits.__name__), ) ), ], }.items() for ( commit_messages, prerelease, major_on_zero, allow_zero_version, 
next_release_version, branch_name, ) in values # type: ignore[attr-defined] ], ], ), ) def test_version_next_w_zero_dot_versions_conventional( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, cli_runner: CliRunner, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (normal release actions should have occurred when forced patch bump) 
assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), commit_messages, prerelease, "rc" if prerelease_token is None else prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_single_branch is currently 0.1.1 # Note repo_with_single_branch isn't modelled with prereleases ( repo_w_trunk_only_conventional_commits.__name__, None, ): [ *( # when prerelease must be False, and allow_zero_version is True, # the version is not bumped because of non valuable changes regardless # of the major_on_zero value (commits, False, major_on_zero, True, "0.1.1", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), ) ), ], # Latest version for repo_with_single_branch_and_prereleases is # currently 0.2.0 ( repo_w_trunk_only_n_prereleases_conventional_commits.__name__, None, ): [ *( # when allow_zero_version is True, the version is not bumped # regardless of prerelease and major_on_zero values when given # non valuable changes (commits, prerelease, major_on_zero, True, "0.2.0", None) for prerelease in (True, False) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(conventional_chore_commits.__name__), 
) ), ], }.items() for ( commit_messages, prerelease, major_on_zero, allow_zero_version, next_release_version, branch_name, ) in values # type: ignore[attr-defined] ], ], ), ) def test_version_next_w_zero_dot_versions_no_bump_conventional( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, cli_runner: CliRunner, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, 
tags_before) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ pytest.param( lazy_fixture(repo_fixture_name), commit_messages, prerelease, "rc" if prerelease_token is None else prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 ( repo_w_no_tags_emoji_commits.__name__, None, ): [ *( # when prerelease is False, & major_on_zero is False & # allow_zero_version is True, the version should be # 0.1.0, with the given commits (commits, False, False, True, "0.1.0", None) for commits in ( # Even when this test does not change anything, the base modification # will be a minor change and thus the version will be bumped to 0.1.0 None, # Non version bumping commits are absorbed into the previously detected minor bump lazy_fixture(emoji_chore_commits.__name__), # Patch commits are absorbed into the previously detected minor bump lazy_fixture(emoji_patch_commits.__name__), # Minor level commits are absorbed into the previously detected minor bump lazy_fixture(emoji_minor_commits.__name__), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a 
minor level bump lazy_fixture(emoji_major_commits.__name__), ) ), # when prerelease is False, & major_on_zero is False, & allow_zero_version is True, # the version should only be minor bumped when provided major commits because # of the major_on_zero value ( lazy_fixture(emoji_major_commits.__name__), False, False, True, "0.1.0", None, ), # when prerelease is False, & major_on_zero is True & allow_zero_version is True, # the version should be major bumped when provided major commits because # of the major_on_zero value ( lazy_fixture(emoji_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is False, & allow_zero_version is False, the version should be # 1.0.0, across the board because 0 is not a valid major version. # major_on_zero is ignored as it is not relevant but tested for completeness (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(emoji_chore_commits.__name__), lazy_fixture(emoji_patch_commits.__name__), lazy_fixture(emoji_minor_commits.__name__), lazy_fixture(emoji_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch is currently 0.1.1 # Note repo_with_single_branch isn't modelled with prereleases ( repo_w_trunk_only_emoji_commits.__name__, None, ): [ *( # when prerelease must be False, and allow_zero_version is True, # the version is patch bumped because of the patch level commits # regardless of the major_on_zero value ( lazy_fixture(emoji_patch_commits.__name__), False, major_on_zero, True, "0.1.2", None, ) for major_on_zero in (True, False) ), *( # when prerelease must be False, and allow_zero_version is True, # the version is minor bumped because of the major_on_zero value=False (commits, False, False, True, "0.2.0", None) for commits in ( lazy_fixture(emoji_minor_commits.__name__), lazy_fixture(emoji_major_commits.__name__), ) ), # when prerelease must be False, and allow_zero_version is True, # but the major_on_zero 
is True, then when a major level commit is given, # the version should be bumped to the next major version ( lazy_fixture(emoji_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease must be False, & allow_zero_version is False, the version should be # 1.0.0, with any change regardless of major_on_zero (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(emoji_chore_commits.__name__), lazy_fixture(emoji_patch_commits.__name__), lazy_fixture(emoji_minor_commits.__name__), lazy_fixture(emoji_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch_and_prereleases is # currently 0.2.0 ( repo_w_trunk_only_n_prereleases_emoji_commits.__name__, None, ): [ # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped as a prerelease version, when given patch level commits ( lazy_fixture(emoji_patch_commits.__name__), True, False, True, "0.2.1-rc.1", None, ), # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped, when given patch level commits ( lazy_fixture(emoji_patch_commits.__name__), False, False, True, "0.2.1", None, ), *( # when allow_zero_version is True, # prerelease is True, & major_on_zero is False, the version should be # minor bumped as a prerelease version, when given commits of a minor or major level (commits, True, False, True, "0.3.0-rc.1", None) for commits in ( lazy_fixture(emoji_minor_commits.__name__), lazy_fixture(emoji_major_commits.__name__), ) ), *( # when allow_zero_version is True, prerelease is True, & major_on_zero # is False, the version should be minor bumped, when given commits of a # minor or major level because major_on_zero = False (commits, False, False, True, "0.3.0", None) for commits in ( lazy_fixture(emoji_minor_commits.__name__), lazy_fixture(emoji_major_commits.__name__), ) ), # when 
prerelease is True, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0 as a prerelease version, when # given major level commits ( lazy_fixture(emoji_major_commits.__name__), True, True, True, "1.0.0-rc.1", None, ), # when prerelease is False, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0, when given major level commits ( lazy_fixture(emoji_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0 as a prerelease version, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, True, major_on_zero, False, "1.0.0-rc.1", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(emoji_chore_commits.__name__), lazy_fixture(emoji_patch_commits.__name__), lazy_fixture(emoji_minor_commits.__name__), lazy_fixture(emoji_major_commits.__name__), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( lazy_fixture(emoji_patch_commits.__name__), lazy_fixture(emoji_minor_commits.__name__), lazy_fixture(emoji_major_commits.__name__), ) ), ], }.items() for ( commit_messages, prerelease, major_on_zero, allow_zero_version, next_release_version, branch_name, ) in values # type: ignore[attr-defined] ], ), ) def test_version_next_w_zero_dot_versions_emoji( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, cli_runner: CliRunner, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: 
MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( 
str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), commit_messages, prerelease, "rc" if prerelease_token is None else prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_single_branch is currently 0.1.1 # Note repo_with_single_branch isn't modelled with prereleases ( repo_w_trunk_only_emoji_commits.__name__, None, ): [ *( # when prerelease must be False, and allow_zero_version is True, # the version is not bumped because of non valuable changes regardless # of the major_on_zero value (commits, False, major_on_zero, True, "0.1.1", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(emoji_chore_commits.__name__), ) ), ], # Latest version for repo_with_single_branch_and_prereleases is # currently 0.2.0 ( repo_w_trunk_only_n_prereleases_emoji_commits.__name__, None, ): [ *( # when allow_zero_version is True, the version is not bumped # regardless of prerelease and major_on_zero values when given # non valuable changes (commits, prerelease, major_on_zero, True, "0.2.0", None) for prerelease in (True, False) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(emoji_chore_commits.__name__), ) ), ], }.items() for ( commit_messages, prerelease, major_on_zero, allow_zero_version, next_release_version, branch_name, ) in values # type: ignore[attr-defined] ], ], ), ) def test_version_next_w_zero_dot_versions_no_bump_emoji( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, cli_runner: CliRunner, file_in_repo: str, 
update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( str.join( ", ", [ 
"repo_result", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ pytest.param( lazy_fixture(repo_fixture_name), commit_messages, prerelease, "rc" if prerelease_token is None else prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 ( repo_w_no_tags_scipy_commits.__name__, None, ): [ *( # when prerelease is False, & major_on_zero is False & # allow_zero_version is True, the version should be # 0.1.0, with the given commits (commits, False, False, True, "0.1.0", None) for commits in ( # Even when this test does not change anything, the base modification # will be a minor change and thus the version will be bumped to 0.1.0 None, # Non version bumping commits are absorbed into the previously detected minor bump lazy_fixture(scipy_chore_commits.__name__), # Patch commits are absorbed into the previously detected minor bump lazy_fixture(scipy_patch_commits.__name__), # Minor level commits are absorbed into the previously detected minor bump lazy_fixture(scipy_minor_commits.__name__), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a minor level bump lazy_fixture(scipy_major_commits.__name__), ) ), # when prerelease is False, & major_on_zero is False, & allow_zero_version is True, # the version should only be minor bumped when provided major commits because # of the major_on_zero value ( lazy_fixture(scipy_major_commits.__name__), False, False, True, "0.1.0", None, ), # when prerelease is False, & major_on_zero is True & allow_zero_version is True, # the version should be major bumped when 
provided major commits because # of the major_on_zero value ( lazy_fixture(scipy_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is False, & allow_zero_version is False, the version should be # 1.0.0, across the board because 0 is not a valid major version. # major_on_zero is ignored as it is not relevant but tested for completeness (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(scipy_chore_commits.__name__), lazy_fixture(scipy_patch_commits.__name__), lazy_fixture(scipy_minor_commits.__name__), lazy_fixture(scipy_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch is currently 0.1.1 # Note repo_with_single_branch isn't modelled with prereleases ( repo_w_trunk_only_scipy_commits.__name__, None, ): [ *( # when prerelease must be False, and allow_zero_version is True, # the version is patch bumped because of the patch level commits # regardless of the major_on_zero value ( lazy_fixture(scipy_patch_commits.__name__), False, major_on_zero, True, "0.1.2", None, ) for major_on_zero in (True, False) ), *( # when prerelease must be False, and allow_zero_version is True, # the version is minor bumped because of the major_on_zero value=False (commits, False, False, True, "0.2.0", None) for commits in ( lazy_fixture(scipy_minor_commits.__name__), lazy_fixture(scipy_major_commits.__name__), ) ), # when prerelease must be False, and allow_zero_version is True, # but the major_on_zero is True, then when a major level commit is given, # the version should be bumped to the next major version ( lazy_fixture(scipy_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease must be False, & allow_zero_version is False, the version should be # 1.0.0, with any change regardless of major_on_zero (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( None, 
lazy_fixture(scipy_chore_commits.__name__), lazy_fixture(scipy_patch_commits.__name__), lazy_fixture(scipy_minor_commits.__name__), lazy_fixture(scipy_major_commits.__name__), ) ), ], # Latest version for repo_with_single_branch_and_prereleases is # currently 0.2.0 ( repo_w_trunk_only_n_prereleases_scipy_commits.__name__, None, ): [ # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped as a prerelease version, when given patch level commits ( lazy_fixture(scipy_patch_commits.__name__), True, False, True, "0.2.1-rc.1", None, ), # when allow_zero_version is True, # prerelease is False, & major_on_zero is False, the version should be # patch bumped, when given patch level commits ( lazy_fixture(scipy_patch_commits.__name__), False, False, True, "0.2.1", None, ), *( # when allow_zero_version is True, # prerelease is True, & major_on_zero is False, the version should be # minor bumped as a prerelease version, when given commits of a minor or major level (commits, True, False, True, "0.3.0-rc.1", None) for commits in ( lazy_fixture(scipy_minor_commits.__name__), lazy_fixture(scipy_major_commits.__name__), ) ), *( # when allow_zero_version is True, prerelease is True, & major_on_zero # is False, the version should be minor bumped, when given commits of a # minor or major level because major_on_zero = False (commits, False, False, True, "0.3.0", None) for commits in ( lazy_fixture(scipy_minor_commits.__name__), lazy_fixture(scipy_major_commits.__name__), ) ), # when prerelease is True, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0 as a prerelease version, when # given major level commits ( lazy_fixture(scipy_major_commits.__name__), True, True, True, "1.0.0-rc.1", None, ), # when prerelease is False, & major_on_zero is True, and allow_zero_version # is True, the version should be bumped to 1.0.0, when given major level commits ( 
lazy_fixture(scipy_major_commits.__name__), False, True, True, "1.0.0", None, ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0 as a prerelease version, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, True, major_on_zero, False, "1.0.0-rc.1", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(scipy_chore_commits.__name__), lazy_fixture(scipy_patch_commits.__name__), lazy_fixture(scipy_minor_commits.__name__), lazy_fixture(scipy_major_commits.__name__), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # bumped to 1.0.0, when given any/none commits # because 0.x is no longer a valid version regardless of the major_on_zero value (commits, False, major_on_zero, False, "1.0.0", None) for major_on_zero in (True, False) for commits in ( lazy_fixture(scipy_patch_commits.__name__), lazy_fixture(scipy_minor_commits.__name__), lazy_fixture(scipy_major_commits.__name__), ) ), ], }.items() for ( commit_messages, prerelease, major_on_zero, allow_zero_version, next_release_version, branch_name, ) in values # type: ignore[attr-defined] ], ), ) def test_version_next_w_zero_dot_versions_scipy( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, cli_runner: CliRunner, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) 
update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred @pytest.mark.parametrize( str.join( ", ", [ "repo_result", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ *[ pytest.param( lazy_fixture(repo_fixture_name), commit_messages, prerelease, "rc" if prerelease_token is None else prerelease_token, major_on_zero, allow_zero_version, next_release_version, 
"main" if branch_name is None else branch_name, marks=pytest.mark.comprehensive, ) for (repo_fixture_name, prerelease_token), values in { # Latest version for repo_with_single_branch is currently 0.1.1 # Note repo_with_single_branch isn't modelled with prereleases ( repo_w_trunk_only_scipy_commits.__name__, None, ): [ *( # when prerelease must be False, and allow_zero_version is True, # the version is not bumped because of non valuable changes regardless # of the major_on_zero value (commits, False, major_on_zero, True, "0.1.1", None) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(scipy_chore_commits.__name__), ) ), ], # Latest version for repo_with_single_branch_and_prereleases is # currently 0.2.0 ( repo_w_trunk_only_n_prereleases_scipy_commits.__name__, None, ): [ *( # when allow_zero_version is True, the version is not bumped # regardless of prerelease and major_on_zero values when given # non valuable changes (commits, prerelease, major_on_zero, True, "0.2.0", None) for prerelease in (True, False) for major_on_zero in (True, False) for commits in ( None, lazy_fixture(scipy_chore_commits.__name__), ) ), ], }.items() for ( commit_messages, prerelease, major_on_zero, allow_zero_version, next_release_version, branch_name, ) in values # type: ignore[attr-defined] ], ], ), ) def test_version_next_w_zero_dot_versions_no_bump_scipy( repo_result: BuiltRepoResult, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, cli_runner: CliRunner, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml( 
"tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred when no bump) assert_successful_exit_code(result, cli_cmd) assert f"{next_release_version}\n" == result.stdout # No commit has been made assert head_sha_before == head_after.hexsha assert len(tags_set_difference) == 0 # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( str.join( " ,", [ "repo_result", "commit_parser", "commit_messages", "prerelease", "prerelease_token", "major_on_zero", "allow_zero_version", "next_release_version", "branch_name", ], ), xdist_sort_hack( [ ( # Latest version for repo_w_initial_commit is currently 0.0.0 # with no changes made it should be 0.0.0 lazy_fixture(repo_w_initial_commit.__name__), 
ConventionalCommitParser.__name__.replace("CommitParser", "").lower(), None, False, "rc", False, True, "0.0.0", "main", ), *[ pytest.param( lazy_fixture(repo_w_initial_commit.__name__), str.replace(parser_class_name, "CommitParser", "").lower(), commit_messages, prerelease, prerelease_token, major_on_zero, allow_zero_version, next_release_version, "main", marks=pytest.mark.comprehensive, ) for prerelease_token, values in { # Latest version for repo_with_no_tags is currently 0.0.0 (default) # It's biggest change type is minor, so the next version should be 0.1.0 "rc": [ *( # when prerelease is False, major_on_zero is True & False, & allow_zero_version is True # the version should be 0.0.0, when no distintive changes have been made since the # start of the project (commits, parser, prerelease, major_on_zero, True, "0.0.0") for prerelease in (True, False) for major_on_zero in (True, False) for commits, parser in ( # No commits added, so base is just initial commit at 0.0.0 (None, ConventionalCommitParser.__name__), # Chore like commits also don't trigger a version bump so it stays 0.0.0 ( lazy_fixture(conventional_chore_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_chore_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_chore_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, True, major_on_zero, True, "0.0.1-rc.1") for major_on_zero in (True, False) for commits, parser in ( # when prerelease is True & allow_zero_version is True, the version should be # a patch bump as a prerelease version, because of the patch level commits # major_on_zero is irrelevant here as we are only applying patch commits ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, False, major_on_zero, True, 
"0.0.1") for major_on_zero in (True, False) for commits, parser in ( # when prerelease is False, & allow_zero_version is True, the version should be # a patch bump because of the patch commits added # major_on_zero is irrelevant here as we are only applying patch commits ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, True, False, True, "0.1.0-rc.1") for commits, parser in ( # when prerelease is False, & major_on_zero is False, the version should be # a minor bump because of the minor commits added ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to only causing a minor level bump ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( (commits, parser, False, False, True, "0.1.0") for commits, parser in ( # when prerelease is False, # major_on_zero is False, & allow_zero_version is True # the version should be a minor bump of 0.0.0 # because of the minor commits added and zero version is allowed ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), # Given the major_on_zero is False and the version is starting at 0.0.0, # the major level commits are limited to 
only causing a minor level bump ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # a prerelease version 1.0.0-rc.1, across the board when any valuable change # is made because of the allow_zero_version is False, major_on_zero is ignored # when allow_zero_version is False (but we still test it) (commits, parser, True, major_on_zero, False, "1.0.0-rc.1") for major_on_zero in (True, False) for commits, parser in ( # parser doesn't matter here as long as it detects a NO_RELEASE on Initial Commit (None, ConventionalCommitParser.__name__), ( lazy_fixture(conventional_chore_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_chore_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_chore_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), *( # when prerelease is True, & allow_zero_version is False, the version should be # 1.0.0, across the board when any valuable change # is made because of the allow_zero_version is False. 
major_on_zero is ignored # when allow_zero_version is False (but we still test it) (commits, parser, False, major_on_zero, False, "1.0.0") for major_on_zero in (True, False) for commits, parser in ( (None, ConventionalCommitParser.__name__), ( lazy_fixture(conventional_chore_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_patch_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_minor_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(conventional_major_commits.__name__), ConventionalCommitParser.__name__, ), ( lazy_fixture(emoji_chore_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_patch_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_minor_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(emoji_major_commits.__name__), EmojiCommitParser.__name__, ), ( lazy_fixture(scipy_chore_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_patch_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_minor_commits.__name__), ScipyCommitParser.__name__, ), ( lazy_fixture(scipy_major_commits.__name__), ScipyCommitParser.__name__, ), ) ), ], }.items() for ( commit_messages, parser_class_name, prerelease, major_on_zero, allow_zero_version, next_release_version, ) in values # type: ignore[attr-defined] ], ], ), ) def test_version_next_w_zero_dot_versions_minimums( repo_result: BuiltRepoResult, commit_parser: str, commit_messages: list[str], prerelease: bool, prerelease_token: str, next_release_version: str, branch_name: str, major_on_zero: bool, allow_zero_version: bool, cli_runner: CliRunner, file_in_repo: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, ): repo = repo_result["repo"] # setup: select the branch we desire for the next bump if repo.active_branch.name != branch_name: 
repo.heads[branch_name].checkout() # setup: update pyproject.toml with the necessary settings update_pyproject_toml("tool.semantic_release.commit_parser", commit_parser) update_pyproject_toml( "tool.semantic_release.allow_zero_version", allow_zero_version ) update_pyproject_toml("tool.semantic_release.major_on_zero", major_on_zero) # setup: apply commits to the repo stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) for commit_message in commit_messages or []: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message, a=True, date=next(commit_timestamp_gen)) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Derive the cli arguments based on parameter input prerelease_args = list( filter( None, [ "--as-prerelease" if prerelease else "", *(["--prerelease-token", prerelease_token] if prerelease else []), ], ) ) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, *prerelease_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (normal release actions should have occurred when forced patch bump) assert_successful_exit_code(result, cli_cmd) # A commit has been made (regardless of precommit) assert [head_sha_before] == [head.hexsha for head in head_after.parents] assert len(tags_set_difference) == 1 # A tag has been created assert f"v{next_release_version}" in tags_set_difference assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 # vcs release creation occurred 
python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_changelog.py000066400000000000000000000652361475670435200276260ustar00rootroot00000000000000from __future__ import annotations import os from datetime import datetime, timezone from typing import TYPE_CHECKING import pytest from freezegun import freeze_time from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.commands.main import main from semantic_release.cli.config import ChangelogOutputFormat from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.example_project import ( default_md_changelog_insertion_flag, default_rst_changelog_insertion_flag, example_changelog_md, example_changelog_rst, ) from tests.fixtures.repos import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits, repo_w_github_flow_w_default_release_channel_conventional_commits, repo_w_github_flow_w_default_release_channel_emoji_commits, repo_w_github_flow_w_default_release_channel_scipy_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_github_flow_w_feature_release_channel_emoji_commits, repo_w_github_flow_w_feature_release_channel_scipy_commits, repo_w_no_tags_conventional_commits, repo_w_no_tags_emoji_commits, repo_w_no_tags_scipy_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_emoji_commits, repo_w_trunk_only_n_prereleases_conventional_commits, repo_w_trunk_only_n_prereleases_emoji_commits, repo_w_trunk_only_n_prereleases_scipy_commits, repo_w_trunk_only_scipy_commits, ) from tests.util import 
assert_successful_exit_code if TYPE_CHECKING: from pathlib import Path from click.testing import CliRunner from tests.conftest import FormatDateStrFn, GetStableDateNowFn from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import ( BuiltRepoResult, CommitConvention, GetCommitsFromRepoBuildDefFn, GetVersionsFromRepoBuildDefFn, ) @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( # ChangelogOutputFormat.MARKDOWN lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( # ChangelogOutputFormat.RESTRUCTURED_TEXT lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result, cache_key, tag_format", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), f"psr/repos/{repo_w_trunk_only_conventional_commits.__name__}", "v{version}", ), *[ pytest.param( lazy_fixture(repo_fixture), f"psr/repos/{repo_fixture}", "v{version}" if tag_format is None else tag_format, marks=pytest.mark.comprehensive, ) for repo_fixture, tag_format in [ # Must have a previous release/tag *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ # repo_with_single_branch_conventional_commits.__name__, # default repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_emoji_commits.__name__, repo_w_trunk_only_n_prereleases_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_default_release_channel_emoji_commits.__name__, repo_w_github_flow_w_default_release_channel_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ 
repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__, repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, "submod-v{version}", ) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__, ] ], ] ], ], ) def test_version_updates_changelog_w_new_version( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, tag_format: str, update_pyproject_toml: UpdatePyprojectTomlFn, cli_runner: CliRunner, changelog_file: Path, insertion_flag: str, cache: pytest.Cache, cache_key: str, stable_now_date: GetStableDateNowFn, ): """ Given a previously released custom modified changelog file, When the version command is run with changelog.mode set to "update", Then the version is created and the changelog file is updated with new release info while maintaining the previously customized content """ repo = repo_result["repo"] latest_tag = tag_format.format( version=get_versions_from_repo_build_def(repo_result["definition"])[-1] ) if not (repo_build_data := cache.get(cache_key, None)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, 
day=repo_build_datetime.day, ) # Custom text to maintain (must be different from the default) custom_text = "---{ls}{ls}Custom footer text{ls}".format(ls=os.linesep) # Capture expected changelog content with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( os.linesep, [ initial_changelog_parts[1], "", custom_text, ], ), ], ) # Reverse last release repo.git.tag("-d", latest_tag) repo.git.reset("--hard", "HEAD~1") # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Modify the current changelog with our custom text at bottom # Universal newlines is ok here since we are writing it back out # and not working with the os-specific insertion flag changelog_file.write_text( str.join( "\n", [ changelog_file.read_text(), "", custom_text, ], ) ) with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = cli_runner.invoke(main, cli_cmd[1:]) # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_format, changelog_file, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN, lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT, lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.parametrize( "repo_result, cache_key", [ ( 
lazy_fixture(repo_w_no_tags_conventional_commits.__name__), f"psr/repos/{repo_w_no_tags_conventional_commits.__name__}", ), *[ pytest.param( lazy_fixture(repo_fixture), f"psr/repos/{repo_fixture}", marks=pytest.mark.comprehensive, ) for repo_fixture in [ # Must not have a single release/tag # repo_with_no_tags_conventional_commits.__name__, # default repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, ] ], ], ) def test_version_updates_changelog_wo_prev_releases( repo_result: BuiltRepoResult, cache_key: str, cache: pytest.Cache, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_format: ChangelogOutputFormat, changelog_file: Path, insertion_flag: str, stable_now_date: GetStableDateNowFn, format_date_str: FormatDateStrFn, ): """ Given the repository has no releases and the user has provided a initialized changelog, When the version command is run with changelog.mode set to "update", Then the version is created and the changelog file is updated with new release info """ if not (repo_build_data := cache.get(cache_key, None)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) repo_build_date_str = format_date_str(now_datetime) # Custom text to maintain (must be different from the default) custom_text = "---{ls}{ls}Custom footer text{ls}".format(ls=os.linesep) # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) version = "v0.1.0" rst_version_header = f"{version} ({repo_build_date_str})" search_n_replacements = { ChangelogOutputFormat.MARKDOWN: ( "## Unreleased", f"## {version} ({repo_build_date_str})", ), 
ChangelogOutputFormat.RESTRUCTURED_TEXT: ( ".. _changelog-unreleased:{ls}{ls}Unreleased{ls}{underline}".format( ls=os.linesep, underline="=" * len("Unreleased"), ), str.join( os.linesep, [ f".. _changelog-{version}:", "", rst_version_header, f"{'=' * len(rst_version_header)}", ], ), ), } search_text = search_n_replacements[changelog_format][0] replacement_text = search_n_replacements[changelog_format][1] # Capture and modify the current changelog content to become the expected output # We much use os.linesep here since the insertion flag is os-specific with changelog_file.open(newline=os.linesep) as rfd: initial_changelog_parts = rfd.read().split(insertion_flag) # content is os-specific because of the insertion flag & how we read the original file expected_changelog_content = str.join( insertion_flag, [ initial_changelog_parts[0], str.join( os.linesep, [ initial_changelog_parts[1].replace( search_text, replacement_text, ), "", custom_text, ], ), ], ) # Grab the Unreleased changelog & create the initalized user changelog # force output to not perform any newline translations with changelog_file.open(mode="w", newline="") as wfd: wfd.write( str.join( insertion_flag, [initial_changelog_parts[0], f"{os.linesep * 2}{custom_text}"], ) ) wfd.flush() # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog footer is maintained and updated with Unreleased info assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file", [ lazy_fixture(example_changelog_md.__name__), lazy_fixture(example_changelog_rst.__name__), ], ) 
@pytest.mark.parametrize( "repo_result, cache_key, tag_format", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), f"psr/repos/{repo_w_trunk_only_conventional_commits.__name__}", "v{version}", ), *[ pytest.param( lazy_fixture(repo_fixture), f"psr/repos/{repo_fixture}", "v{version}" if tag_format is None else tag_format, marks=pytest.mark.comprehensive, ) for repo_fixture, tag_format in [ # Must have a previous release/tag *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ # repo_with_single_branch_conventional_commits.__name__, # default repo_w_trunk_only_emoji_commits.__name__, repo_w_trunk_only_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_emoji_commits.__name__, repo_w_trunk_only_n_prereleases_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_default_release_channel_emoji_commits.__name__, repo_w_github_flow_w_default_release_channel_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__, repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( repo_fixture_name, None, ) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__, ] ], *[ ( 
repo_fixture_name, "submod-v{version}", ) for repo_fixture_name in [ repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__, ] ], ] ], ], ) def test_version_initializes_changelog_in_update_mode_w_no_prev_changelog( repo_result: BuiltRepoResult, cache_key: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, tag_format: str, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, cache: pytest.Cache, stable_now_date: GetStableDateNowFn, ): """ Given that the changelog file does not exist, When the version command is run with changelog.mode set to "update", Then the version is created and the changelog file is initialized with the default content. """ repo = repo_result["repo"] latest_tag = tag_format.format( version=get_versions_from_repo_build_def(repo_result["definition"])[-1] ) if not (repo_build_data := cache.get(cache_key, None)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Reverse last release repo.git.tag("-d", latest_tag) repo.git.reset("--hard", "HEAD~1") # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Remove any previous changelog to update os.remove(str(changelog_file.resolve())) # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Check that the changelog file was re-created 
assert changelog_file.exists() actual_content = changelog_file.read_text() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( "changelog_file, insertion_flag", [ ( lazy_fixture(example_changelog_md.__name__), lazy_fixture(default_md_changelog_insertion_flag.__name__), ), ( lazy_fixture(example_changelog_rst.__name__), lazy_fixture(default_rst_changelog_insertion_flag.__name__), ), ], ) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_version_maintains_changelog_in_update_mode_w_no_flag( changelog_file: Path, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, insertion_flag: str, ): """ Given that the changelog file exists but does not contain the insertion flag, When the version command is run with changelog.mode set to "update", Then the version is created but the changelog file is not updated. """ update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) # Remove the insertion flag from the existing changelog with changelog_file.open(newline=os.linesep) as rfd: expected_changelog_content = rfd.read().replace( f"{insertion_flag}{os.linesep}", "", 1, ) # no newline translations with changelog_file.open("w", newline="") as wfd: wfd.write(expected_changelog_content) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) # Ensure changelog exists assert changelog_file.exists() # Capture the new changelog content (os aware because of expected content) with changelog_file.open(newline=os.linesep) as rfd: actual_content = rfd.read() # Check that the changelog content is the same as before assert expected_changelog_content == actual_content @pytest.mark.parametrize( 
"changelog_file", [ lazy_fixture(example_changelog_md.__name__), lazy_fixture(example_changelog_rst.__name__), ], ) @pytest.mark.parametrize( "repo_result, cache_key, commit_type, tag_format", [ ( lazy_fixture(repo_fixture), f"psr/repos/{repo_fixture}", repo_fixture.split("_")[-2], "v{version}", ) for repo_fixture in [ # Must have a previous release/tag repo_w_trunk_only_conventional_commits.__name__, ] ], ) def test_version_updates_changelog_w_new_version_n_filtered_commit( repo_result: BuiltRepoResult, cache: pytest.Cache, cache_key: str, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, commit_type: CommitConvention, tag_format: str, update_pyproject_toml: UpdatePyprojectTomlFn, cli_runner: CliRunner, changelog_file: Path, stable_now_date: GetStableDateNowFn, get_commits_from_repo_build_def: GetCommitsFromRepoBuildDefFn, ): """ Given a project that has a version bumping change but also an exclusion pattern for the same change type, When the version command is run, Then the version is created and the changelog file is updated with the excluded commit info anyway. 
""" repo = repo_result["repo"] latest_version = get_versions_from_repo_build_def(repo_result["definition"])[-1] latest_tag = tag_format.format(version=latest_version) repo_definition = get_commits_from_repo_build_def(repo_result["definition"]) if not (repo_build_data := cache.get(cache_key, None)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) # expected version bump commit (that should be in changelog) bumping_commit = repo_definition[latest_version]["commits"][-1] expected_bump_message = bumping_commit["desc"].capitalize() # Capture the expected changelog content expected_changelog_content = changelog_file.read_text() # Reverse last release repo.git.tag("-d", latest_tag) repo.git.reset("--hard", "HEAD~1") # Set the project configurations update_pyproject_toml( "tool.semantic_release.changelog.mode", ChangelogMode.UPDATE.value ) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) update_pyproject_toml( "tool.semantic_release.changelog.exclude_commit_patterns", [f"{bumping_commit['msg'].split(':', maxsplit=1)[0]}: .*"], ) # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] result = cli_runner.invoke(main, cli_cmd[1:]) # Capture the new changelog content (os aware because of expected content) actual_content = changelog_file.read_text() # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == actual_content assert expected_bump_message in actual_content python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_changelog_custom_commit_msg.py000066400000000000000000000164121475670435200334260ustar00rootroot00000000000000from __future__ import annotations from 
datetime import datetime, timedelta, timezone from os import remove as delete_file from textwrap import dedent from typing import TYPE_CHECKING import pytest from freezegun import freeze_time from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.context import ChangelogMode from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from tests.e2e.conftest import ( get_sanitized_md_changelog_content, get_sanitized_rst_changelog_content, ) from tests.fixtures.example_project import ( changelog_md_file, changelog_rst_file, ) from tests.fixtures.repos import ( repo_w_trunk_only_conventional_commits, ) from tests.util import ( assert_successful_exit_code, ) if TYPE_CHECKING: from pathlib import Path from typing import TypedDict from click.testing import CliRunner from tests.conftest import GetStableDateNowFn from tests.e2e.conftest import GetSanitizedChangelogContentFn from tests.fixtures.example_project import UpdatePyprojectTomlFn from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuiltRepoResult, CommitDef, GetCfgValueFromDefFn, GetVersionsFromRepoBuildDefFn, RepoActions, SplitRepoActionsByReleaseTagsFn, ) class Commit2Section(TypedDict): conventional: Commit2SectionCommit emoji: Commit2SectionCommit scipy: Commit2SectionCommit class Commit2SectionCommit(TypedDict): commit: CommitDef section: str @pytest.mark.parametrize( str.join( ", ", [ "custom_commit_message", "changelog_mode", "changelog_file", "get_sanitized_changelog_content", "repo_result", "cache_key", ], ), [ pytest.param( custom_commit_message, changelog_mode, lazy_fixture(changelog_file), lazy_fixture(cl_sanitizer), lazy_fixture(repo_fixture_name), f"psr/repos/{repo_fixture_name}", marks=pytest.mark.comprehensive, ) for changelog_mode in [ChangelogMode.INIT, ChangelogMode.UPDATE] for changelog_file, cl_sanitizer in [ ( changelog_md_file.__name__, get_sanitized_md_changelog_content.__name__, ), ( 
changelog_rst_file.__name__, get_sanitized_rst_changelog_content.__name__, ), ] for repo_fixture_name, custom_commit_message in [ *[ ( # Repos: Must have at least 2 releases repo_w_trunk_only_conventional_commits.__name__, commit_msg, ) for commit_msg in [ dedent( # Conventional compliant prefix with skip-ci idicator """\ chore(release): v{version} [skip ci] Automatically generated by python-semantic-release. """ ), ] ], ] ], ) def test_version_changelog_content_custom_commit_message_excluded_automatically( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, get_cfg_value_from_def: GetCfgValueFromDefFn, split_repo_actions_by_release_tags: SplitRepoActionsByReleaseTagsFn, build_repo_from_definition: BuildRepoFromDefinitionFn, cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, changelog_file: Path, changelog_mode: ChangelogMode, custom_commit_message: str, cache: pytest.Cache, cache_key: str, stable_now_date: GetStableDateNowFn, example_project_dir: Path, get_sanitized_changelog_content: GetSanitizedChangelogContentFn, ): """ Given a repo with a custom release commit message When the version subcommand is invoked with the changelog flag Then the resulting changelog content should not include the custom commit message It should work regardless of changelog mode and changelog file type """ expected_changelog_content = get_sanitized_changelog_content( repo_dir=example_project_dir, remove_insertion_flag=bool(changelog_mode == ChangelogMode.INIT), ) repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] all_versions = get_versions_from_repo_build_def(repo_def) latest_tag = tag_format_str.format(version=all_versions[-1]) previous_tag = tag_format_str.format(version=all_versions[-2]) # split repo actions by release actions releasetags_2_steps: dict[str, list[RepoActions]] = ( 
split_repo_actions_by_release_tags(repo_def, tag_format_str) ) # Reverse release to make the previous version again with the new commit message repo.git.tag("-d", latest_tag) repo.git.reset("--hard", f"{previous_tag}~1") repo.git.tag("-d", previous_tag) # Set the project configurations update_pyproject_toml("tool.semantic_release.changelog.mode", changelog_mode.value) update_pyproject_toml( "tool.semantic_release.changelog.default_templates.changelog_file", str(changelog_file.name), ) update_pyproject_toml( "tool.semantic_release.commit_message", custom_commit_message, ) if not (repo_build_data := cache.get(cache_key, None)): pytest.fail("Repo build date not found in cache") repo_build_datetime = datetime.strptime(repo_build_data["build_date"], "%Y-%m-%d") now_datetime = stable_now_date().replace( year=repo_build_datetime.year, month=repo_build_datetime.month, day=repo_build_datetime.day, ) if changelog_mode == ChangelogMode.UPDATE and len(all_versions) == 2: # When in update mode, and at the very first release, its better the # changelog file does not exist as we have an non-conformative example changelog # in the base example project delete_file(example_project_dir / changelog_file) cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-push", "--changelog"] # Act: make the first release again with freeze_time(now_datetime.astimezone(timezone.utc)): result = cli_runner.invoke(main, cli_cmd[1:]) assert_successful_exit_code(result, cli_cmd) # Act: apply commits for change of version steps_for_next_release = releasetags_2_steps[latest_tag][ :-1 ] # stop before the release step build_repo_from_definition( dest_dir=example_project_dir, repo_construction_steps=steps_for_next_release, ) # Act: make the second release again with freeze_time(now_datetime.astimezone(timezone.utc) + timedelta(minutes=1)): result = cli_runner.invoke(main, cli_cmd[1:]) actual_content = get_sanitized_changelog_content( repo_dir=example_project_dir, remove_insertion_flag=bool(changelog_mode == 
ChangelogMode.INIT), ) # Evaluate assert_successful_exit_code(result, cli_cmd) assert expected_changelog_content == actual_content python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_github_actions.py000066400000000000000000000025651475670435200306750ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from semantic_release.cli.commands.main import main from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, ) from tests.util import actions_output_to_dict, assert_successful_exit_code if TYPE_CHECKING: from pathlib import Path from click.testing import CliRunner @pytest.mark.usefixtures( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__ ) def test_version_writes_github_actions_output( cli_runner: CliRunner, monkeypatch: pytest.MonkeyPatch, tmp_path: Path, ): mock_output_file = tmp_path / "action.out" monkeypatch.setenv("GITHUB_OUTPUT", str(mock_output_file.resolve())) cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--patch", "--no-push"] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Extract the output action_outputs = actions_output_to_dict( mock_output_file.read_text(encoding="utf-8") ) # Evaluate assert_successful_exit_code(result, cli_cmd) assert "released" in action_outputs assert action_outputs["released"] == "true" assert "version" in action_outputs assert action_outputs["version"] == "1.2.1" assert "tag" in action_outputs assert action_outputs["tag"] == "v1.2.1" python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_print.py000066400000000000000000001067241475670435200270310ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.main import main from tests.const import ( MAIN_PROG_NAME, VERSION_SUBCMD, ) from 
tests.fixtures.commit_parsers import conventional_minor_commits from tests.fixtures.git_repo import get_commit_def_of_conventional_commit from tests.fixtures.repos import ( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format, repo_w_no_tags_conventional_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_conventional_commits_using_tag_format, ) from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import ( repo_w_no_tags_conventional_commits_using_tag_format, ) from tests.util import ( add_text_to_file, assert_exit_code, assert_successful_exit_code, ) if TYPE_CHECKING: from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.fixtures.git_repo import ( BuiltRepoResult, GetCfgValueFromDefFn, GetCommitDefFn, GetVersionsFromRepoBuildDefFn, SimulateChangeCommitsNReturnChangelogEntryFn, ) @pytest.mark.parametrize( "repo_result, commits, force_args, next_release_version", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), cli_args, next_release_version, ) for cli_args, next_release_version in ( # Dynamic version bump determination (based on commits) ([], "0.2.0"), # Dynamic version bump determination (based on commits) with build metadata (["--build-metadata", "build.12345"], "0.2.0+build.12345"), # Forced version bump (["--prerelease"], "0.1.1-rc.1"), (["--patch"], "0.1.2"), (["--minor"], "0.2.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.1.2+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.1.1-rc.1"), (["--patch", "--as-prerelease"], "0.1.2-rc.1"), (["--minor", "--as-prerelease"], "0.2.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", 
"beta"], "0.1.2-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.1.2-beta.1+build.12345", ), ) ], ) def test_version_print_next_version( repo_result: BuiltRepoResult, commits: list[str], force_args: list[str], next_release_version: str, file_in_repo: str, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given a generic repository at the latest release version and a subsequent commit, When running the version command with the --print flag, Then the expected next version should be printed and exit without making any changes to the repository. Note: The point of this test is to only verify that the `--print` flag does not make any changes to the repository--not to validate if the next version is calculated correctly per the repository structure (see test_version_release & test_version_force_level for correctness). However, we do validate that --print & a force option and/or --as-prerelease options work together to print the next version correctly but not make a change to the repo. 
""" repo = repo_result["repo"] # Make a commit to ensure we have something to release # otherwise the "no release will be made" logic will kick in first add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[-1], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print", *force_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{next_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, commits, force_args, next_release_version", [ *[ pytest.param( lazy_fixture(repo_fixture_name), lazy_fixture(conventional_minor_commits.__name__), cli_args, next_release_version, marks=marks if marks else [], ) for repo_fixture_name, marks in ( (repo_w_trunk_only_conventional_commits.__name__, None), ( repo_w_trunk_only_conventional_commits_using_tag_format.__name__, pytest.mark.comprehensive, ), ) for cli_args, next_release_version in ( # Dynamic version bump determination (based on commits) ([], "0.2.0"), # Dynamic version bump determination (based on commits) with build metadata (["--build-metadata", "build.12345"], "0.2.0+build.12345"), # Forced version bump (["--prerelease"], "0.1.1-rc.1"), (["--patch"], "0.1.2"), (["--minor"], "0.2.0"), 
(["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.1.2+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.1.1-rc.1"), (["--patch", "--as-prerelease"], "0.1.2-rc.1"), (["--minor", "--as-prerelease"], "0.2.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.1.2-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.1.2-beta.1+build.12345", ), ) ], *[ pytest.param( lazy_fixture(repo_fixture_name), [], cli_args, next_release_version, marks=pytest.mark.comprehensive, ) for repo_fixture_name in ( repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_conventional_commits_using_tag_format.__name__, ) for cli_args, next_release_version in ( # Dynamic version bump determination (based on commits) ([], "0.1.0"), # Dynamic version bump determination (based on commits) with build metadata (["--build-metadata", "build.12345"], "0.1.0+build.12345"), # Forced version bump (["--prerelease"], "0.0.0-rc.1"), (["--patch"], "0.0.1"), (["--minor"], "0.1.0"), (["--major"], "1.0.0"), # Forced version bump with --build-metadata (["--patch", "--build-metadata", "build.12345"], "0.0.1+build.12345"), # Forced version bump with --as-prerelease (["--prerelease", "--as-prerelease"], "0.0.0-rc.1"), (["--patch", "--as-prerelease"], "0.0.1-rc.1"), (["--minor", "--as-prerelease"], "0.1.0-rc.1"), (["--major", "--as-prerelease"], "1.0.0-rc.1"), # Forced version bump with --as-prerelease and modified --prerelease-token ( ["--patch", "--as-prerelease", "--prerelease-token", "beta"], "0.0.1-beta.1", ), # Forced version bump with --as-prerelease and modified --prerelease-token # 
and --build-metadata ( [ "--patch", "--as-prerelease", "--prerelease-token", "beta", "--build-metadata", "build.12345", ], "0.0.1-beta.1+build.12345", ), ) ], ], ) def test_version_print_tag_prints_next_tag( repo_result: BuiltRepoResult, commits: list[str], force_args: list[str], next_release_version: str, get_cfg_value_from_def: GetCfgValueFromDefFn, file_in_repo: str, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given a generic repository at the latest release version and a subsequent commit, When running the version command with the --print-tag flag, Then the expected next release tag should be printed and exit without making any changes to the repository. Note: The point of this test is to only verify that the `--print-tag` flag does not make any changes to the repository--not to validate if the next version is calculated correctly per the repository structure (see test_version_release & test_version_force_level for correctness). However, we do validate that --print-tag & a force option and/or --as-prerelease options work together to print the next release tag correctly but not make a change to the repo. 
""" repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] next_release_tag = tag_format_str.format(version=next_release_version) if len(commits) > 1: # Make a commit to ensure we have something to release # otherwise the "no release will be made" logic will kick in first add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[-1], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-tag", *force_args] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{next_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_print_last_released_prints_version( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = 
repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, commits", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), ) ], ) def test_version_print_last_released_prints_released_if_commits( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, commits: list[str], cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, file_in_repo: str, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Make a commit so the head is not on the last release add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[0], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha 
tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_version_print_last_released_prints_nothing_if_no_tags( repo_result: BuiltRepoResult, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, caplog: pytest.LogCaptureFixture, ): repo = repo_result["repo"] # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred on print) assert_successful_exit_code(result, cli_cmd) assert result.stdout == "" # must use capture log to see this, because we use the logger to print this message # not click's output assert "No release tags found." 
in caplog.text # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_print_last_released_on_detached_head( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Setup: put the repo in a detached head state repo.git.checkout("HEAD", detach=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert 
post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_print_last_released_on_nonrelease_branch( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] # Setup: put the repo on a non-release branch repo.create_head("next").checkout() # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{latest_release_version}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), pytest.param( lazy_fixture( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), marks=pytest.mark.comprehensive, ), ], ) def 
test_version_print_last_released_tag_prints_correct_tag( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] latest_release_tag = tag_format_str.format(version=latest_release_version) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{latest_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, commits", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(conventional_minor_commits.__name__), ), pytest.param( lazy_fixture( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), lazy_fixture(conventional_minor_commits.__name__), marks=pytest.mark.comprehensive, ), ], ) def 
test_version_print_last_released_tag_prints_released_if_commits( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, commits: list[str], cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, file_in_repo: str, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] latest_release_tag = tag_format_str.format(version=latest_release_version) # Make a commit so the head is not on the last release add_text_to_file(repo, file_in_repo) repo.git.commit(m=commits[0], a=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{latest_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_version_print_last_released_tag_prints_nothing_if_no_tags( repo_result: BuiltRepoResult, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: 
Mocker, caplog: pytest.LogCaptureFixture, ): repo = repo_result["repo"] # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (no release actions should have occurred on print) assert_successful_exit_code(result, cli_cmd) assert result.stdout == "" # must use capture log to see this, because we use the logger to print this message # not click's output assert "No release tags found." in caplog.text # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), pytest.param( lazy_fixture( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), marks=pytest.mark.comprehensive, ), ], ) def test_version_print_last_released_tag_on_detached_head( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = get_cfg_value_from_def(repo_def, "tag_format_str") # type: 
ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] latest_release_tag = tag_format_str.format(version=latest_release_version) # Setup: put the repo in a detached head state repo.git.checkout("HEAD", detach=True) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{latest_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha # No commit has been made assert not tags_set_difference # No tag created assert mocked_git_push.call_count == 0 # no git push of tag or commit assert post_mocker.call_count == 0 # no vcs release @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), pytest.param( lazy_fixture( repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ ), marks=pytest.mark.comprehensive, ), ], ) def test_version_print_last_released_tag_on_nonrelease_branch( repo_result: BuiltRepoResult, get_cfg_value_from_def: GetCfgValueFromDefFn, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] repo_def = repo_result["definition"] tag_format_str: str = 
get_cfg_value_from_def(repo_def, "tag_format_str") # type: ignore[assignment] latest_release_version = get_versions_from_repo_build_def(repo_def)[-1] last_release_tag = tag_format_str.format(version=latest_release_version) # Setup: put the repo on a non-release branch repo.create_head("next").checkout() # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-last-released-tag"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (expected -> actual) assert_successful_exit_code(result, cli_cmd) assert not result.stderr assert f"{last_release_tag}\n" == result.stdout # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_sha_before == head_after.hexsha assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, get_commit_def_fn", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(get_commit_def_of_conventional_commit.__name__), ) ], ) def test_version_print_next_version_fails_on_detached_head( repo_result: BuiltRepoResult, cli_runner: CliRunner, simulate_change_commits_n_rtn_changelog_entry: SimulateChangeCommitsNReturnChangelogEntryFn, get_commit_def_fn: GetCommitDefFn, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] expected_error_msg = ( "Detached HEAD state cannot match any release groups; no release will be made" ) # Setup: put the repo in a detached head state repo.git.checkout("HEAD", 
detach=True) # Setup: make a commit to ensure we have something to release simulate_change_commits_n_rtn_changelog_entry( repo, [get_commit_def_fn("fix: make a patch fix to codebase")], ) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (expected -> actual) assert_exit_code(1, result, cli_cmd) assert not result.stdout assert f"{expected_error_msg}\n" == result.stderr # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result, get_commit_def_fn", [ ( lazy_fixture(repo_w_trunk_only_conventional_commits.__name__), lazy_fixture(get_commit_def_of_conventional_commit.__name__), ) ], ) def test_version_print_next_tag_fails_on_detached_head( repo_result: BuiltRepoResult, cli_runner: CliRunner, simulate_change_commits_n_rtn_changelog_entry: SimulateChangeCommitsNReturnChangelogEntryFn, get_commit_def_fn: GetCommitDefFn, mocked_git_push: MagicMock, post_mocker: Mocker, ): repo = repo_result["repo"] expected_error_msg = ( "Detached HEAD state cannot match any release groups; no release will be made" ) # Setup: put the repo in a detached head state repo.git.checkout("HEAD", detach=True) # Setup: make a commit to ensure we have something to release simulate_change_commits_n_rtn_changelog_entry( repo, [get_commit_def_fn("fix: 
make a patch fix to codebase")], ) # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--print-tag"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) # Evaluate (expected -> actual) assert_exit_code(1, result, cli_cmd) assert not result.stdout assert f"{expected_error_msg}\n" == result.stderr # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert not tags_set_difference assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_release_notes.py000066400000000000000000000131771475670435200305240ustar00rootroot00000000000000from __future__ import annotations import os from datetime import timezone from typing import TYPE_CHECKING import pytest from freezegun import freeze_time from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.main import main from semantic_release.version.version import Version from tests.const import ( EXAMPLE_RELEASE_NOTES_TEMPLATE, MAIN_PROG_NAME, VERSION_SUBCMD, RepoActionStep, ) from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import ( repo_w_no_tags_emoji_commits, repo_w_no_tags_scipy_commits, ) from tests.util import assert_successful_exit_code, get_release_history_from_context if TYPE_CHECKING: from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import 
Mocker from tests.conftest import GetStableDateNowFn from tests.e2e.conftest import ( RetrieveRuntimeContextFn, ) from tests.fixtures.example_project import ( UpdatePyprojectTomlFn, UseReleaseNotesTemplateFn, ) from tests.fixtures.git_repo import ( BuiltRepoResult, GenerateDefaultReleaseNotesFromDefFn, GetHvcsClientFromRepoDefFn, ) @pytest.mark.parametrize( "repo_result, next_release_version", [ (lazy_fixture(repo_w_no_tags_conventional_commits.__name__), "0.1.0"), ], ) def test_custom_release_notes_template( repo_result: BuiltRepoResult, next_release_version: str, cli_runner: CliRunner, use_release_notes_template: UseReleaseNotesTemplateFn, retrieve_runtime_context: RetrieveRuntimeContextFn, mocked_git_push: MagicMock, post_mocker: Mocker, ) -> None: """Verify the template `.release_notes.md.j2` from `template_dir` is used.""" release_version = Version.parse(next_release_version) # Setup use_release_notes_template() runtime_context = retrieve_runtime_context(repo_result["repo"]) # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--vcs-release"] result = cli_runner.invoke(main, cli_cmd[1:]) # Must run this after the action because the release history object should be pulled from the # repository after a tag is created release_history = get_release_history_from_context(runtime_context) release = release_history.released[release_version] expected_release_notes = ( runtime_context.template_environment.from_string(EXAMPLE_RELEASE_NOTES_TEMPLATE) .render(release=release) .rstrip() + os.linesep ) # ensure normalized line endings after render expected_release_notes = str.join( os.linesep, str.split(expected_release_notes.replace("\r", ""), "\n"), ) # Assert assert_successful_exit_code(result, cli_cmd) assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 assert post_mocker.last_request is not None actual_notes = post_mocker.last_request.json()["body"] assert expected_release_notes == actual_notes @pytest.mark.parametrize( 
"repo_result, license_name, license_setting", [ pytest.param( lazy_fixture(repo_fixture_name), license_name, license_setting, marks=pytest.mark.comprehensive, ) for license_name in ["", "MIT", "GPL-3.0"] for license_setting in [ "project.license-expression", "project.license", # deprecated "project.license.text", # deprecated ] for repo_fixture_name in [ repo_w_no_tags_conventional_commits.__name__, repo_w_no_tags_emoji_commits.__name__, repo_w_no_tags_scipy_commits.__name__, ] ], ) def test_default_release_notes_license_statement( repo_result: BuiltRepoResult, cli_runner: CliRunner, license_name: str, license_setting: str, update_pyproject_toml: UpdatePyprojectTomlFn, mocked_git_push: MagicMock, post_mocker: Mocker, stable_now_date: GetStableDateNowFn, get_hvcs_client_from_repo_def: GetHvcsClientFromRepoDefFn, generate_default_release_notes_from_def: GenerateDefaultReleaseNotesFromDefFn, ): new_version = "0.1.0" # Setup now_datetime = stable_now_date() repo_def = list(repo_result["definition"]) repo_def.append( { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": now_datetime.isoformat(timespec="seconds"), }, } ) # Setup: Overwrite the default setting (defined in test.const) update_pyproject_toml("project.license-expression", None) # Setup: set the license for the test update_pyproject_toml(license_setting, license_name) expected_release_notes = generate_default_release_notes_from_def( version_actions=repo_def, hvcs=get_hvcs_client_from_repo_def(repo_def), previous_version=None, license_name=license_name, mask_initial_release=False, ) # Act with freeze_time(now_datetime.astimezone(timezone.utc)): cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-changelog", "--vcs-release"] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert mocked_git_push.call_count == 2 # 1 for commit, 1 for tag assert post_mocker.call_count == 1 assert post_mocker.last_request is not None request_body = 
post_mocker.last_request.json() assert "body" in request_body actual_notes = request_body["body"] assert expected_release_notes == actual_notes python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_stamp.py000066400000000000000000000377131475670435200270220ustar00rootroot00000000000000from __future__ import annotations import json from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING import pytest import tomlkit import yaml from dotty_dict import Dotty from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.main import main from semantic_release.version.declarations.enum import VersionStampType from tests.const import EXAMPLE_PROJECT_NAME, MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import ( repo_w_no_tags_conventional_commits, ) from tests.fixtures.repos.trunk_based_dev.repo_w_prereleases import ( repo_w_trunk_only_n_prereleases_conventional_commits, ) from tests.util import ( assert_successful_exit_code, dynamic_python_import, ) if TYPE_CHECKING: from unittest.mock import MagicMock from click.testing import CliRunner from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult VERSION_STAMP_CMD = [ MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit", "--no-tag", "--skip-build", "--no-changelog", ] """Using the version command, prevent any action besides stamping the version""" @pytest.mark.parametrize( "repo_result, expected_new_version", [ ( lazy_fixture(repo_w_trunk_only_n_prereleases_conventional_commits.__name__), "0.3.0", ) ], ) def test_version_only_stamp_version( repo_result: BuiltRepoResult, expected_new_version: str, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: MagicMock, example_pyproject_toml: Path, example_project_dir: ExProjectDir, example_changelog_md: Path, example_changelog_rst: Path, ) -> None: repo = repo_result["repo"] version_file = 
example_project_dir.joinpath( "src", EXAMPLE_PROJECT_NAME, "_version.py" ) expected_changed_files = sorted( [ "pyproject.toml", str(version_file.relative_to(example_project_dir)), ] ) # Setup: take measurement before running the version command head_sha_before = repo.head.commit.hexsha tags_before = {tag.name for tag in repo.tags} version_py_before = dynamic_python_import( version_file, f"{EXAMPLE_PROJECT_NAME}._version" ).__version__ pyproject_toml_before = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ) # Modify the pyproject.toml to remove the version so we can compare it later pyproject_toml_before.get("tool", {}).get("poetry", {}).pop("version") # Act (stamp the version but also create the changelog) cli_cmd = [*VERSION_STAMP_CMD, "--minor"] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command head_after = repo.head.commit tags_after = {tag.name for tag in repo.tags} tags_set_difference = set.difference(tags_after, tags_before) differing_files = [ # Make sure filepath uses os specific path separators str(Path(file)) for file in str(repo.git.diff(name_only=True)).splitlines() ] pyproject_toml_after = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ) pyproj_version_after = ( pyproject_toml_after.get("tool", {}).get("poetry", {}).pop("version") ) # Load python module for reading the version (ensures the file is valid) version_py_after = dynamic_python_import( version_file, f"{EXAMPLE_PROJECT_NAME}._version" ).__version__ # Evaluate (no release actions should be taken but version should be stamped from forced minor bump) assert_successful_exit_code(result, cli_cmd) assert head_sha_before == head_after.hexsha # No commit should be made assert not tags_set_difference # No tag should be created # no push as it should be turned off automatically assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 # no vcs release creation occurred # Files that should receive 
version change assert expected_changed_files == differing_files # Compare pyproject.toml assert pyproject_toml_before == pyproject_toml_after assert expected_new_version == pyproj_version_after # Compare _version.py assert expected_new_version == version_py_after assert version_py_before != version_py_after # ============================================================================== # # VERSION STAMP DIFFERENT CONTENT TYPES # # ============================================================================== # @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_python( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, example_project_dir: ExProjectDir, ) -> None: new_version = "0.1.0" target_file = example_project_dir.joinpath( "src", EXAMPLE_PROJECT_NAME, "_version.py" ) # Set configuration to modify the python file update_pyproject_toml( "tool.semantic_release.version_variables", [f"{target_file.relative_to(example_project_dir)}:__version__"], ) # Act cli_cmd = VERSION_STAMP_CMD result = cli_runner.invoke(main, cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Load python module for reading the version (ensures the file is valid) version_py_after = dynamic_python_import( target_file, f"{EXAMPLE_PROJECT_NAME}._version" ).__version__ # Check the version was updated assert new_version == version_py_after @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_toml( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, default_tag_format_str: str, ) -> None: orig_version = "0.0.0" new_version = "0.1.0" orig_release = default_tag_format_str.format(version=orig_version) new_release = default_tag_format_str.format(version=new_version) target_file = Path("example.toml") orig_toml = dedent( f"""\ [package] name = "example" version = "{orig_version}" release = "{orig_release}" date-released = "1970-01-01" """ ) 
orig_toml_obj = Dotty(tomlkit.parse(orig_toml)) # Write initial text in file target_file.write_text(orig_toml) # Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_toml", [ f"{target_file}:package.version:{VersionStampType.NUMBER_FORMAT.value}", f"{target_file}:package.release:{VersionStampType.TAG_FORMAT.value}", ], ) # Act cli_cmd = VERSION_STAMP_CMD result = cli_runner.invoke(main, cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_toml_obj = Dotty(tomlkit.parse(target_file.read_text())) # Check the version was updated assert new_version == resulting_toml_obj["package.version"] assert new_release == resulting_toml_obj["package.release"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_toml_obj["package.version"] = orig_version resulting_toml_obj["package.release"] = orig_release assert orig_toml_obj == resulting_toml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_yaml( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, ) -> None: orig_version = "0.0.0" new_version = "0.1.0" target_file = Path("example.yml") orig_yaml = dedent( f"""\ --- package: example version: {orig_version} date-released: 1970-01-01 """ ) # Write initial text in file target_file.write_text(orig_yaml) # Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_variables", [f"{target_file}:version"] ) # Act cli_cmd = VERSION_STAMP_CMD result = cli_runner.invoke(main, cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert new_version == resulting_yaml_obj["version"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_yaml_obj["version"] = orig_version assert 
yaml.safe_load(orig_yaml) == resulting_yaml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_yaml_cff( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, ) -> None: """ Given a yaml file with a top level version directive, When the version command is run, Then the version is updated in the file and the rest of the content is unchanged & parsable Based on https://github.com/python-semantic-release/python-semantic-release/issues/962 """ orig_version = "0.0.0" new_version = "0.1.0" target_file = Path("CITATION.cff") orig_yaml = dedent( f"""\ --- cff-version: 1.2.0 message: "If you use this software, please cite it as below." authors: - family-names: Doe given-names: Jon orcid: https://orcid.org/1234-6666-2222-5555 title: "My Research Software" version: {orig_version} date-released: 1970-01-01 """ ) # Write initial text in file target_file.write_text(orig_yaml) # Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_variables", [f"{target_file}:version"] ) # Act cli_cmd = VERSION_STAMP_CMD result = cli_runner.invoke(main, cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert new_version == resulting_yaml_obj["version"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_yaml_obj["version"] = orig_version assert yaml.safe_load(orig_yaml) == resulting_yaml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_json( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, ) -> None: orig_version = "0.0.0" new_version = "0.1.0" target_file = Path("plugins.json") orig_json = { "id": "test-plugin", "version": orig_version, "meta": { "description": "Test plugin", }, } # Write initial text in file 
target_file.write_text(json.dumps(orig_json, indent=4)) # Set configuration to modify the json file update_pyproject_toml( "tool.semantic_release.version_variables", [f"{target_file}:version"] ) # Act cli_cmd = VERSION_STAMP_CMD result = cli_runner.invoke(main, cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_json_obj = json.loads(target_file.read_text()) # Check the version was updated assert new_version == resulting_json_obj["version"] # Check the rest of the content is the same (by resetting the version & comparing) resulting_json_obj["version"] = orig_version assert orig_json == resulting_json_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_yaml_github_actions( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, default_tag_format_str: str, ) -> None: """ Given a yaml file with github actions 'uses:' directives which use @vX.Y.Z version declarations, When a version is stamped and configured to stamp the version using the tag format, Then the file is updated with the new version in the tag format Based on https://github.com/python-semantic-release/python-semantic-release/issues/1156 """ orig_version = "0.0.0" new_version = "0.1.0" target_file = Path("combined.yml") action1_yaml_filepath = "my-org/my-actions/.github/workflows/action1.yml" action2_yaml_filepath = "my-org/my-actions/.github/workflows/action2.yml" orig_yaml = dedent( f"""\ --- on: workflow_call: jobs: action1: uses: {action1_yaml_filepath}@{default_tag_format_str.format(version=orig_version)} action2: uses: {action2_yaml_filepath}@{default_tag_format_str.format(version=orig_version)} """ ) expected_action1_value = ( f"{action1_yaml_filepath}@{default_tag_format_str.format(version=new_version)}" ) expected_action2_value = ( f"{action2_yaml_filepath}@{default_tag_format_str.format(version=new_version)}" ) # Setup: Write initial text in file target_file.write_text(orig_yaml) 
# Setup: Set configuration to modify the yaml file update_pyproject_toml( "tool.semantic_release.version_variables", [ f"{target_file}:{action1_yaml_filepath}:{VersionStampType.TAG_FORMAT.value}", f"{target_file}:{action2_yaml_filepath}:{VersionStampType.TAG_FORMAT.value}", ], ) # Act cli_cmd = VERSION_STAMP_CMD result = cli_runner.invoke(main, cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert expected_action1_value == resulting_yaml_obj["jobs"]["action1"]["uses"] assert expected_action2_value == resulting_yaml_obj["jobs"]["action2"]["uses"] # Check the rest of the content is the same (by setting the version & comparing) original_yaml_obj = yaml.safe_load(orig_yaml) original_yaml_obj["jobs"]["action1"]["uses"] = expected_action1_value original_yaml_obj["jobs"]["action2"]["uses"] = expected_action2_value assert original_yaml_obj == resulting_yaml_obj @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_stamp_version_variables_yaml_kustomization_container_spec( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn, default_tag_format_str: str, ) -> None: """ Given a yaml file with directives that expect a vX.Y.Z version tag declarations, When a version is stamped and configured to stamp the version using the tag format, Then the file is updated with the new version in the tag format Based on https://github.com/python-semantic-release/python-semantic-release/issues/846 """ orig_version = "0.0.0" new_version = "0.1.0" target_file = Path("kustomization.yaml") orig_yaml = dedent( f"""\ images: - name: repo/image newTag: {default_tag_format_str.format(version=orig_version)} """ ) expected_new_tag_value = default_tag_format_str.format(version=new_version) # Setup: Write initial text in file target_file.write_text(orig_yaml) # Setup: Set configuration to modify the yaml file 
update_pyproject_toml( "tool.semantic_release.version_variables", [ f"{target_file}:newTag:{VersionStampType.TAG_FORMAT.value}", ], ) # Act cli_cmd = VERSION_STAMP_CMD result = cli_runner.invoke(main, cli_cmd[1:]) # Check the result assert_successful_exit_code(result, cli_cmd) # Read content resulting_yaml_obj = yaml.safe_load(target_file.read_text()) # Check the version was updated assert expected_new_tag_value == resulting_yaml_obj["images"][0]["newTag"] # Check the rest of the content is the same (by resetting the version & comparing) original_yaml_obj = yaml.safe_load(orig_yaml) resulting_yaml_obj["images"][0]["newTag"] = original_yaml_obj["images"][0]["newTag"] assert original_yaml_obj == resulting_yaml_obj python-semantic-release-9.21.0/tests/e2e/cmd_version/test_version_strict.py000066400000000000000000000100171475670435200271720ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.main import main from tests.const import MAIN_PROG_NAME, VERSION_SUBCMD from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_exit_code if TYPE_CHECKING: from unittest.mock import MagicMock from click.testing import CliRunner from requests_mock import Mocker from tests.fixtures.git_repo import BuiltRepoResult, GetVersionsFromRepoBuildDefFn @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)], ) def test_version_already_released_when_strict( repo_result: BuiltRepoResult, get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given repo has no new changes since the last release, When running the version command in strict mode, Then no version release should happen, which means no code changes, no build, no commit, no tag, no push, and no vcs 
release creation while returning an exit code of 2. """ repo = repo_result["repo"] latest_release_version = get_versions_from_repo_build_def( repo_result["definition"] )[-1] expected_error_msg = f"[bold orange1]No release will be made, {latest_release_version} has already been released!" # Setup: take measurement before running the version command repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # take measurement after running the version command repo_status_after = repo.git.status(short=True) head_after = repo.head.commit.hexsha tags_after = sorted([tag.name for tag in repo.tags]) # Evaluate assert_exit_code(2, result, cli_cmd) assert f"{latest_release_version}\n" == result.stdout assert f"{expected_error_msg}\n" == result.stderr # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) assert repo_status_before == repo_status_after assert head_before == head_after assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_version_on_nonrelease_branch_when_strict( repo_result: BuiltRepoResult, cli_runner: CliRunner, mocked_git_push: MagicMock, post_mocker: Mocker, ): """ Given repo is on a non-release branch, When running the version command in strict mode, Then no version release should happen which means no code changes, no build, no commit, no tag, no push, and no vcs release creation while returning an exit code of 2. 
""" repo = repo_result["repo"] # Setup branch = repo.create_head("next").checkout() expected_error_msg = ( f"branch '{branch.name}' isn't in any release groups; no release will be made\n" ) repo_status_before = repo.git.status(short=True) head_before = repo.head.commit.hexsha tags_before = sorted([tag.name for tag in repo.tags]) # Act cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) assert not result.stdout assert expected_error_msg == result.stderr # assert nothing else happened (no code changes, no commit, no tag, no push, no vcs release) tags_after = sorted([tag.name for tag in repo.tags]) assert repo_status_before == repo.git.status(short=True) assert head_before == repo.head.commit.hexsha assert tags_before == tags_after assert mocked_git_push.call_count == 0 assert post_mocker.call_count == 0 python-semantic-release-9.21.0/tests/e2e/conftest.py000066400000000000000000000146561475670435200224100ustar00rootroot00000000000000from __future__ import annotations import os from pathlib import Path from re import IGNORECASE, compile as regexp from typing import TYPE_CHECKING from unittest.mock import MagicMock import pytest from requests_mock import ANY from semantic_release.cli import config as cli_config_module from semantic_release.cli.config import ( GlobalCommandLineOptions, RawConfig, RuntimeContext, ) from semantic_release.cli.const import DEFAULT_CONFIG_FILE from semantic_release.cli.util import load_raw_config_file from tests.util import prepare_mocked_git_command_wrapper_type if TYPE_CHECKING: from re import Pattern from typing import Protocol from git.repo import Repo from pytest import MonkeyPatch from requests_mock.mocker import Mocker from tests.fixtures.example_project import ExProjectDir class GetSanitizedChangelogContentFn(Protocol): def __call__( self, repo_dir: Path, remove_insertion_flag: bool = True, ) -> str: ... 
class ReadConfigFileFn(Protocol): """Read the raw config file from `config_path`.""" def __call__(self, file: Path | str) -> RawConfig: ... class RetrieveRuntimeContextFn(Protocol): """Retrieve the runtime context for a repo.""" def __call__(self, repo: Repo) -> RuntimeContext: ... @pytest.hookimpl(tryfirst=True) def pytest_collection_modifyitems(items: list[pytest.Item]) -> None: """Apply the e2e marker to all tests in the end-to-end test directory.""" cli_test_directory = Path(__file__).parent for item in items: if cli_test_directory in item.path.parents: item.add_marker(pytest.mark.e2e) @pytest.fixture def post_mocker(requests_mock: Mocker) -> Mocker: """Patch all POST requests, mocking a response body for VCS release creation.""" requests_mock.register_uri("POST", ANY, json={"id": 999}) return requests_mock @pytest.fixture def mocked_git_push(monkeypatch: MonkeyPatch) -> MagicMock: """Mock the `Repo.git.push()` method in `semantic_release.cli.main`.""" mocked_push = MagicMock() cls = prepare_mocked_git_command_wrapper_type(push=mocked_push) monkeypatch.setattr(cli_config_module.Repo, "GitCommandWrapperType", cls) return mocked_push @pytest.fixture def config_path(example_project_dir: ExProjectDir) -> Path: return example_project_dir / DEFAULT_CONFIG_FILE @pytest.fixture def read_config_file() -> ReadConfigFileFn: def _read_config_file(file: Path | str) -> RawConfig: config_text = load_raw_config_file(file) return RawConfig.model_validate(config_text) return _read_config_file @pytest.fixture def cli_options(config_path: Path) -> GlobalCommandLineOptions: return GlobalCommandLineOptions( noop=False, verbosity=0, strict=False, config_file=str(config_path), ) @pytest.fixture def retrieve_runtime_context( read_config_file: ReadConfigFileFn, cli_options: GlobalCommandLineOptions, ) -> RetrieveRuntimeContextFn: def _retrieve_runtime_context(repo: Repo) -> RuntimeContext: cwd = os.getcwd() repo_dir = str(Path(repo.working_dir).resolve()) os.chdir(repo_dir) try: 
raw_config = read_config_file(cli_options.config_file) return RuntimeContext.from_raw_config(raw_config, cli_options) finally: os.chdir(cwd) return _retrieve_runtime_context @pytest.fixture(scope="session") def long_hash_pattern() -> Pattern: return regexp(r"\b([0-9a-f]{40})\b", IGNORECASE) @pytest.fixture(scope="session") def short_hash_pattern() -> Pattern: return regexp(r"\b([0-9a-f]{7})\b", IGNORECASE) @pytest.fixture(scope="session") def get_sanitized_rst_changelog_content( changelog_rst_file: Path, default_rst_changelog_insertion_flag: str, long_hash_pattern: Pattern, short_hash_pattern: Pattern, ) -> GetSanitizedChangelogContentFn: rst_short_hash_link_pattern = regexp(r"(_[0-9a-f]{7})\b", IGNORECASE) def _get_sanitized_rst_changelog_content( repo_dir: Path, remove_insertion_flag: bool = True, ) -> str: # TODO: v10 change -- default turns to update so this is not needed # Because we are in init mode, the insertion flag is not present in the changelog # we must take it out manually because our repo generation fixture includes it automatically with (repo_dir / changelog_rst_file).open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison changelog_content = ( rfd.read().replace( f"{default_rst_changelog_insertion_flag}{os.linesep}", "" ) if remove_insertion_flag else rfd.read() ).replace("\r", "") changelog_content = long_hash_pattern.sub("0" * 40, changelog_content) changelog_content = short_hash_pattern.sub("0" * 7, changelog_content) return rst_short_hash_link_pattern.sub(f'_{"0" * 7}', changelog_content) return _get_sanitized_rst_changelog_content @pytest.fixture(scope="session") def get_sanitized_md_changelog_content( changelog_md_file: Path, default_md_changelog_insertion_flag: str, long_hash_pattern: Pattern, short_hash_pattern: Pattern, ) -> GetSanitizedChangelogContentFn: def _get_sanitized_md_changelog_content( repo_dir: Path, remove_insertion_flag: 
bool = True, ) -> str: # TODO: v10 change -- default turns to update so this is not needed # Because we are in init mode, the insertion flag is not present in the changelog # we must take it out manually because our repo generation fixture includes it automatically with (repo_dir / changelog_md_file).open(newline=os.linesep) as rfd: # use os.linesep here because the insertion flag is os-specific # but convert the content to universal newlines for comparison changelog_content = ( rfd.read().replace( f"{default_md_changelog_insertion_flag}{os.linesep}", "" ) if remove_insertion_flag else rfd.read() ).replace("\r", "") changelog_content = long_hash_pattern.sub("0" * 40, changelog_content) return short_hash_pattern.sub("0" * 7, changelog_content) return _get_sanitized_md_changelog_content python-semantic-release-9.21.0/tests/e2e/test_help.py000066400000000000000000000157661475670435200225550ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.commands.changelog import changelog from semantic_release.cli.commands.generate_config import generate_config from semantic_release.cli.commands.main import main from semantic_release.cli.commands.publish import publish from semantic_release.cli.commands.version import version from tests.const import MAIN_PROG_NAME, SUCCESS_EXIT_CODE from tests.fixtures.repos import repo_w_trunk_only_conventional_commits from tests.util import assert_exit_code if TYPE_CHECKING: from click import Command from click.testing import CliRunner from git import Repo from tests.fixtures import UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult # Define the expected exit code for the help command HELP_EXIT_CODE = SUCCESS_EXIT_CODE @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, 
generate_config, publish, version), ids=lambda cmd: cmd.name, ) def test_help_no_repo( help_option: str, command: Command, cli_runner: CliRunner, change_to_ex_proj_dir: None, ): """ Test that the help message is displayed even when the current directory is not a git repository and there is not a configuration file available. Documented issue #840 """ # Generate some expected output that should be specific per command cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != "main" else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = cli_runner.invoke(main, args, prog_name=MAIN_PROG_NAME) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, generate_config, publish, version), ids=lambda cmd: cmd.name, ) @pytest.mark.usefixtures(repo_w_trunk_only_conventional_commits.__name__) def test_help_valid_config( help_option: str, command: Command, cli_runner: CliRunner, ): """ Test that the help message is displayed when the current directory is a git repository and there is a valid configuration file available. 
Documented issue #840 """ cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != main.name else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = cli_runner.invoke(main, args, prog_name=MAIN_PROG_NAME) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, generate_config, publish, version), ids=lambda cmd: cmd.name, ) def test_help_invalid_config( help_option: str, command: Command, cli_runner: CliRunner, repo_w_trunk_only_conventional_commits: Repo, update_pyproject_toml: UpdatePyprojectTomlFn, ): """ Test that the help message is displayed when the current directory is a git repository and there is an invalid configuration file available. 
Documented issue #840 """ # Update the configuration file to have an invalid value update_pyproject_toml("tool.semantic_release.remote.type", "invalidhvcs") # Generate some expected output that should be specific per command cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != "main" else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = cli_runner.invoke(main, args, prog_name=MAIN_PROG_NAME) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output @pytest.mark.parametrize( "help_option", ("-h", "--help"), ids=lambda opt: opt.lstrip("-") ) @pytest.mark.parametrize( "command", (main, changelog, generate_config, publish, version), ids=lambda cmd: cmd.name, ) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_trunk_only_conventional_commits.__name__)] ) def test_help_non_release_branch( help_option: str, command: Command, cli_runner: CliRunner, repo_result: BuiltRepoResult, ): """ Test that the help message is displayed even when the current branch is not a release branch. 
Documented issue #840 """ # Create & checkout a non-release branch non_release_branch = repo_result["repo"].create_head("feature-branch") non_release_branch.checkout() # Generate some expected output that should be specific per command cmd_usage = str.join( " ", list( filter( None, [ "Usage:", MAIN_PROG_NAME, command.name if command.name != "main" else "", "[OPTIONS]", "" if command.name != main.name else "COMMAND [ARGS]...", ], ) ), ) # Create the arguments list for subcommands unless its main args = list( filter(None, [command.name if command.name != main.name else "", help_option]) ) # Run the command with the help option result = cli_runner.invoke(main, args, prog_name=MAIN_PROG_NAME) # Evaluate result assert_exit_code(HELP_EXIT_CODE, result, [MAIN_PROG_NAME, *args]) assert cmd_usage in result.output python-semantic-release-9.21.0/tests/e2e/test_main.py000066400000000000000000000164121475670435200225360ustar00rootroot00000000000000from __future__ import annotations import json import subprocess from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING import git import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release import __version__ from semantic_release.cli.commands.main import main from tests.const import MAIN_PROG_NAME, SUCCESS_EXIT_CODE, VERSION_SUBCMD from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import assert_exit_code, assert_successful_exit_code if TYPE_CHECKING: from pathlib import Path from click.testing import CliRunner from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuiltRepoResult @pytest.mark.parametrize( "project_script_name", [ "python-semantic-release", "semantic-release", "psr", ], ) def test_entrypoint_scripts(project_script_name: str): # Setup command = str.join(" ", [project_script_name, "--version"]) expected_output = f"semantic-release, version 
{__version__}\n" # Act proc = subprocess.run( # noqa: S602, PLW1510 command, shell=True, text=True, capture_output=True ) # Evaluate assert SUCCESS_EXIT_CODE == proc.returncode # noqa: SIM300 assert expected_output == proc.stdout assert not proc.stderr def test_main_prints_version_and_exits(cli_runner: CliRunner): cli_cmd = [MAIN_PROG_NAME, "--version"] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) assert result.output == f"semantic-release, version {__version__}\n" def test_main_no_args_prints_help_text(cli_runner: CliRunner): result = cli_runner.invoke(main, []) assert_successful_exit_code(result, [MAIN_PROG_NAME]) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_not_a_release_branch_exit_code( repo_result: BuiltRepoResult, cli_runner: CliRunner ): # Run anything that doesn't trigger the help text repo_result["repo"].git.checkout("-b", "branch-does-not-exist") # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit"] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_not_a_release_branch_exit_code_with_strict( repo_result: BuiltRepoResult, cli_runner: CliRunner, ): # Run anything that doesn't trigger the help text repo_result["repo"].git.checkout("-b", "branch-does-not-exist") # Act cli_cmd = [MAIN_PROG_NAME, "--strict", VERSION_SUBCMD, "--no-commit"] result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) @pytest.mark.parametrize( "repo_result", [lazy_fixture(repo_w_no_tags_conventional_commits.__name__)], ) def test_not_a_release_branch_detached_head_exit_code( repo_result: BuiltRepoResult, cli_runner: CliRunner, ): expected_err_msg = ( "Detached HEAD state cannot match any release groups; no release will be made" ) # cause repo to 
be in detached head state without file changes repo_result["repo"].git.checkout("HEAD", "--detach") # Act cli_cmd = [MAIN_PROG_NAME, VERSION_SUBCMD, "--no-commit"] result = cli_runner.invoke(main, cli_cmd[1:]) # detached head states should throw an error as release branches cannot be determined assert_exit_code(1, result, cli_cmd) assert expected_err_msg in result.stderr @pytest.fixture def toml_file_with_no_configuration_for_psr(tmp_path: Path) -> Path: path = tmp_path / "config.toml" path.write_text( dedent( r""" [project] name = "foo" version = "1.2.0" """ ) ) return path @pytest.fixture def json_file_with_no_configuration_for_psr(tmp_path: Path) -> Path: path = tmp_path / "config.json" path.write_text(json.dumps({"foo": [1, 2, 3]})) return path @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_default_config_is_used_when_none_in_toml_config_file( cli_runner: CliRunner, toml_file_with_no_configuration_for_psr: Path, ): cli_cmd = [ MAIN_PROG_NAME, "--noop", "--config", str(toml_file_with_no_configuration_for_psr), VERSION_SUBCMD, ] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_default_config_is_used_when_none_in_json_config_file( cli_runner: CliRunner, json_file_with_no_configuration_for_psr: Path, ): cli_cmd = [ MAIN_PROG_NAME, "--noop", "--config", str(json_file_with_no_configuration_for_psr), VERSION_SUBCMD, ] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_errors_when_config_file_does_not_exist_and_passed_explicitly( cli_runner: CliRunner, ): cli_cmd = [ MAIN_PROG_NAME, "--noop", "--config", "somenonexistantfile.123.txt", VERSION_SUBCMD, ] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_exit_code(2, result, cli_cmd) assert "does 
not exist" in result.stderr @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_errors_when_config_file_invalid_configuration( cli_runner: CliRunner, update_pyproject_toml: UpdatePyprojectTomlFn ): # Setup update_pyproject_toml("tool.semantic_release.remote.type", "invalidType") cli_cmd = [MAIN_PROG_NAME, "--config", "pyproject.toml", VERSION_SUBCMD] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # preprocess results stderr_lines = result.stderr.splitlines() # Evaluate assert_exit_code(1, result, cli_cmd) assert "1 validation error for RawConfig" in stderr_lines[0] assert "remote.type" in stderr_lines[1] def test_uses_default_config_when_no_config_file_found( cli_runner: CliRunner, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): # We have to initialise an empty git repository, as the example projects # all have pyproject.toml configs which would be used by default with git.Repo.init(example_project_dir) as repo: repo.git.branch("-M", "main") with repo.config_writer("repository") as config: config.set_value("user", "name", "semantic release testing") config.set_value("user", "email", "not_a_real@email.com") config.set_value("commit", "gpgsign", False) config.set_value("tag", "gpgsign", False) repo.create_remote(name="origin", url="foo@barvcs.com:user/repo.git") repo.git.commit("-m", "feat: initial commit", "--allow-empty") cli_cmd = [MAIN_PROG_NAME, "--noop", VERSION_SUBCMD] # Act result = cli_runner.invoke(main, cli_cmd[1:]) # Evaluate assert_successful_exit_code(result, cli_cmd) python-semantic-release-9.21.0/tests/fixtures/000077500000000000000000000000001475670435200213735ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/fixtures/__init__.py000066400000000000000000000003051475670435200235020ustar00rootroot00000000000000from tests.fixtures.commit_parsers import * from tests.fixtures.example_project import * from tests.fixtures.git_repo import * from tests.fixtures.repos import * from 
tests.fixtures.scipy import * python-semantic-release-9.21.0/tests/fixtures/commit_parsers.py000066400000000000000000000036611475670435200250020ustar00rootroot00000000000000import pytest from semantic_release.commit_parser import ( ConventionalCommitParser, ConventionalCommitParserOptions, EmojiCommitParser, EmojiParserOptions, ) from tests.const import ( CONVENTIONAL_COMMITS_CHORE, CONVENTIONAL_COMMITS_MAJOR, CONVENTIONAL_COMMITS_MINOR, CONVENTIONAL_COMMITS_PATCH, EMOJI_COMMITS_CHORE, EMOJI_COMMITS_MAJOR, EMOJI_COMMITS_MINOR, EMOJI_COMMITS_PATCH, ) # Note scipy defined in ./scipy.py as already used there @pytest.fixture(scope="session") def default_conventional_parser() -> ConventionalCommitParser: return ConventionalCommitParser() @pytest.fixture(scope="session") def default_conventional_parser_options( default_conventional_parser: ConventionalCommitParser, ) -> ConventionalCommitParserOptions: return default_conventional_parser.get_default_options() @pytest.fixture(scope="session") def default_emoji_parser() -> EmojiCommitParser: return EmojiCommitParser() @pytest.fixture(scope="session") def default_emoji_parser_options( default_emoji_parser: EmojiCommitParser, ) -> EmojiParserOptions: return default_emoji_parser.get_default_options() @pytest.fixture(scope="session") def conventional_major_commits(): return CONVENTIONAL_COMMITS_MAJOR @pytest.fixture(scope="session") def conventional_minor_commits(): return CONVENTIONAL_COMMITS_MINOR @pytest.fixture(scope="session") def conventional_patch_commits(): return CONVENTIONAL_COMMITS_PATCH @pytest.fixture(scope="session") def conventional_chore_commits(): return CONVENTIONAL_COMMITS_CHORE @pytest.fixture(scope="session") def emoji_major_commits(): return EMOJI_COMMITS_MAJOR @pytest.fixture(scope="session") def emoji_minor_commits(): return EMOJI_COMMITS_MINOR @pytest.fixture(scope="session") def emoji_patch_commits(): return EMOJI_COMMITS_PATCH @pytest.fixture(scope="session") def emoji_chore_commits(): return 
EMOJI_COMMITS_CHORE python-semantic-release-9.21.0/tests/fixtures/example_project.py000066400000000000000000000402611475670435200251310ustar00rootroot00000000000000from __future__ import annotations import os from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING, Generator import pytest import tomlkit # NOTE: use backport with newer API from importlib_resources import files import semantic_release from semantic_release.commit_parser import ( ConventionalCommitParser, EmojiCommitParser, ScipyCommitParser, ) from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_CHANGELOG_MD_CONTENT, EXAMPLE_CHANGELOG_RST_CONTENT, EXAMPLE_PROJECT_NAME, EXAMPLE_PROJECT_VERSION, EXAMPLE_PYPROJECT_TOML_CONTENT, EXAMPLE_RELEASE_NOTES_TEMPLATE, EXAMPLE_SETUP_CFG_CONTENT, EXAMPLE_SETUP_PY_CONTENT, ) from tests.util import copy_dir_tree, temporary_working_directory if TYPE_CHECKING: from typing import Any, Protocol, Sequence from semantic_release.commit_parser import CommitParser from semantic_release.hvcs import HvcsBase from semantic_release.version.version import Version from tests.conftest import ( BuildRepoOrCopyCacheFn, GetMd5ForSetOfFilesFn, ) from tests.fixtures.git_repo import RepoActions ExProjectDir = Path class GetWheelFileFn(Protocol): def __call__(self, version_str: str) -> Path: ... class SetFlagFn(Protocol): def __call__(self, flag: bool) -> None: ... class UpdatePyprojectTomlFn(Protocol): def __call__(self, setting: str, value: Any) -> None: ... class UseCustomParserFn(Protocol): def __call__(self, module_import_str: str) -> None: ... class UseHvcsFn(Protocol): def __call__(self, domain: str | None = None) -> type[HvcsBase]: ... class UseParserFn(Protocol): def __call__(self) -> type[CommitParser]: ... class UseReleaseNotesTemplateFn(Protocol): def __call__(self) -> None: ... 
@pytest.fixture(scope="session")
def deps_files_4_example_project() -> list[Path]:
    """Files whose content determines whether the cached example project
    must be rebuilt (used to compute the cache-invalidation hash)."""
    return [
        # This file
        Path(__file__).absolute(),
        # because of imports
        Path(tests.const.__file__).absolute(),
        Path(tests.util.__file__).absolute(),
        # because of the fixtures
        Path(tests.conftest.__file__).absolute(),
    ]


@pytest.fixture(scope="session")
def build_spec_hash_4_example_project(
    get_md5_for_set_of_files: GetMd5ForSetOfFilesFn,
    deps_files_4_example_project: list[Path],
) -> str:
    # Generates a hash of the build spec to set when to invalidate the cache
    return get_md5_for_set_of_files(deps_files_4_example_project)
""" def _build_project(cached_project_path: Path) -> Sequence[RepoActions]: # purposefully a relative path example_dir = version_py_file.parent gitignore_contents = dedent( f""" *.pyc /src/**/{version_py_file.name} """ ).lstrip() init_py_contents = dedent( ''' """ An example package with a very informative docstring """ from ._version import __version__ def hello_world() -> None: print("Hello World") ''' ).lstrip() with temporary_working_directory(cached_project_path): update_version_py_file(EXAMPLE_PROJECT_VERSION) file_2_contents: list[tuple[str | Path, str]] = [ (example_dir / "__init__.py", init_py_contents), (".gitignore", gitignore_contents), (pyproject_toml_file, EXAMPLE_PYPROJECT_TOML_CONTENT), (setup_cfg_file, EXAMPLE_SETUP_CFG_CONTENT), (setup_py_file, EXAMPLE_SETUP_PY_CONTENT), (changelog_md_file, EXAMPLE_CHANGELOG_MD_CONTENT), (changelog_rst_file, EXAMPLE_CHANGELOG_RST_CONTENT), ] for file, contents in file_2_contents: abs_filepath = cached_project_path.joinpath(file).resolve() # make sure the parent directory exists abs_filepath.parent.mkdir(parents=True, exist_ok=True) # write file contents abs_filepath.write_text(contents) # This is a special build, we don't expose the Repo Actions to the caller return [] # End of _build_project() return build_repo_or_copy_cache( repo_name=f"project_{EXAMPLE_PROJECT_NAME}", build_spec_hash=build_spec_hash_4_example_project, build_repo_func=_build_project, ) @pytest.fixture def init_example_project( example_project_dir: ExProjectDir, cached_example_project: Path, change_to_ex_proj_dir: None, ) -> None: """This fixture initializes the example project in the current test's project directory.""" if not cached_example_project.exists(): raise RuntimeError( f"Unable to find cached project files for {EXAMPLE_PROJECT_NAME}" ) # Copy the cached project files into the current test's project directory copy_dir_tree(cached_example_project, example_project_dir) @pytest.fixture def example_project_with_release_notes_template( 
init_example_project: None, use_release_notes_template: UseReleaseNotesTemplateFn, ) -> None: use_release_notes_template() @pytest.fixture(scope="session") def version_py_file() -> Path: return Path("src", EXAMPLE_PROJECT_NAME, "_version.py") @pytest.fixture(scope="session") def pyproject_toml_file() -> Path: return Path("pyproject.toml") @pytest.fixture(scope="session") def setup_cfg_file() -> Path: return Path("setup.cfg") @pytest.fixture(scope="session") def setup_py_file() -> Path: return Path("setup.py") @pytest.fixture(scope="session") def dist_dir() -> Path: return Path("dist") @pytest.fixture(scope="session") def changelog_md_file() -> Path: return Path("CHANGELOG.md") @pytest.fixture(scope="session") def changelog_rst_file() -> Path: return Path("CHANGELOG.rst") @pytest.fixture(scope="session") def changelog_template_dir() -> Path: return Path("templates") @pytest.fixture(scope="session") def default_md_changelog_insertion_flag() -> str: return "" @pytest.fixture(scope="session") def default_rst_changelog_insertion_flag() -> str: return f"..{os.linesep} version list" @pytest.fixture(scope="session") def default_changelog_md_template() -> Path: """Retrieve the semantic-release default changelog template file""" return Path( str( files(semantic_release.__name__).joinpath( Path("data", "templates", "angular", "md", "CHANGELOG.md.j2") ) ) ).resolve() @pytest.fixture(scope="session") def default_changelog_rst_template() -> Path: """Retrieve the semantic-release default changelog template file""" return Path( str( files(semantic_release.__name__).joinpath( Path("data", "templates", "angular", "rst", "CHANGELOG.rst.j2") ) ) ).resolve() @pytest.fixture(scope="session") def get_wheel_file(dist_dir: Path) -> GetWheelFileFn: def _get_wheel_file(version_str: str) -> Path: return dist_dir / f"{EXAMPLE_PROJECT_NAME}-{version_str}-py3-none-any.whl" return _get_wheel_file @pytest.fixture def example_project_dir(tmp_path: Path) -> ExProjectDir: return tmp_path.resolve() 
@pytest.fixture
def change_to_ex_proj_dir(
    example_project_dir: ExProjectDir,
) -> Generator[None, None, None]:
    """Chdir into the example project directory for the duration of the test.

    Restores the previous working directory on teardown. If the test is
    already running inside the target directory, nothing is changed.
    """
    cwd = os.getcwd()
    tgt_dir = str(example_project_dir.resolve())
    if cwd == tgt_dir:
        # BUG FIX: a pytest generator fixture must yield exactly once; the
        # original `return` before any yield makes pytest error out with a
        # "fixture function has no 'yield'"-style failure whenever the test
        # session already runs from the project directory. Yield in place
        # (no chdir needed, no teardown needed) instead of returning early.
        yield
        return

    os.chdir(tgt_dir)
    try:
        yield
    finally:
        # Always restore the original working directory, even on test failure
        os.chdir(cwd)
@pytest.fixture(scope="session")
def update_pyproject_toml(pyproject_toml_file: Path) -> UpdatePyprojectTomlFn:
    """Update the pyproject.toml file with the given content."""

    def _update_pyproject_toml(setting: str, value: Any) -> None:
        # Resolved relative to the current working directory, so callers must
        # already be chdir'd into the example project (see change_to_ex_proj_dir).
        cwd_pyproject_toml = pyproject_toml_file.resolve()
        with open(cwd_pyproject_toml) as rfd:
            pyproject_toml = tomlkit.load(rfd)

        # Split the dotted setting path: the last segment is the key to write,
        # all leading segments are (possibly missing) intermediate tables.
        new_setting = {}
        parts = setting.split(".")
        new_setting_key = parts.pop(-1)
        new_setting[new_setting_key] = value

        # Walk down to the parent table of the key, creating tables as needed.
        pointer = pyproject_toml
        for part in parts:
            if pointer.get(part, None) is None:
                pointer.add(part, tomlkit.table())
            pointer = pointer.get(part, {})

        # A value of None deletes the setting rather than writing a null.
        if value is None:
            pointer.pop(new_setting_key)
        else:
            pointer.update(new_setting)

        # Write the modified document back, preserving tomlkit formatting.
        with open(cwd_pyproject_toml, "w") as wfd:
            tomlkit.dump(pyproject_toml, wfd)

    return _update_pyproject_toml
setting.""" def _set_allow_zero_version(flag: bool) -> None: update_pyproject_toml("tool.semantic_release.allow_zero_version", flag) return _set_allow_zero_version @pytest.fixture(scope="session") def use_conventional_parser( update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Conventional parser.""" def _use_conventional_parser() -> type[CommitParser]: update_pyproject_toml(pyproject_toml_config_option_parser, "conventional") return ConventionalCommitParser return _use_conventional_parser @pytest.fixture(scope="session") def use_emoji_parser( update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Emoji parser.""" def _use_emoji_parser() -> type[CommitParser]: update_pyproject_toml(pyproject_toml_config_option_parser, "emoji") return EmojiCommitParser return _use_emoji_parser @pytest.fixture(scope="session") def use_scipy_parser( update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseParserFn: """Modify the configuration file to use the Scipy parser.""" def _use_scipy_parser() -> type[CommitParser]: update_pyproject_toml(pyproject_toml_config_option_parser, "scipy") return ScipyCommitParser return _use_scipy_parser @pytest.fixture(scope="session") def use_custom_parser( update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, ) -> UseCustomParserFn: """Modify the configuration file to use a user defined string parser.""" def _use_custom_parser(module_import_str: str) -> None: update_pyproject_toml(pyproject_toml_config_option_parser, module_import_str) return _use_custom_parser @pytest.fixture(scope="session") def use_github_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: """Modify the configuration file to use GitHub as the HVCS.""" def _use_github_hvcs(domain: str | None = None) -> 
type[HvcsBase]: update_pyproject_toml("tool.semantic_release.remote.type", "github") if domain is not None: update_pyproject_toml("tool.semantic_release.remote.domain", domain) return Github return _use_github_hvcs @pytest.fixture(scope="session") def use_gitlab_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: """Modify the configuration file to use GitLab as the HVCS.""" def _use_gitlab_hvcs(domain: str | None = None) -> type[HvcsBase]: update_pyproject_toml("tool.semantic_release.remote.type", "gitlab") if domain is not None: update_pyproject_toml("tool.semantic_release.remote.domain", domain) return Gitlab return _use_gitlab_hvcs @pytest.fixture(scope="session") def use_gitea_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: """Modify the configuration file to use Gitea as the HVCS.""" def _use_gitea_hvcs(domain: str | None = None) -> type[HvcsBase]: update_pyproject_toml("tool.semantic_release.remote.type", "gitea") if domain is not None: update_pyproject_toml("tool.semantic_release.remote.domain", domain) return Gitea return _use_gitea_hvcs @pytest.fixture(scope="session") def use_bitbucket_hvcs(update_pyproject_toml: UpdatePyprojectTomlFn) -> UseHvcsFn: """Modify the configuration file to use BitBucket as the HVCS.""" def _use_bitbucket_hvcs(domain: str | None = None) -> type[HvcsBase]: update_pyproject_toml("tool.semantic_release.remote.type", "bitbucket") if domain is not None: update_pyproject_toml("tool.semantic_release.remote.domain", domain) return Bitbucket return _use_bitbucket_hvcs python-semantic-release-9.21.0/tests/fixtures/git_repo.py000066400000000000000000002323521475670435200235640ustar00rootroot00000000000000from __future__ import annotations import os import sys from copy import deepcopy from datetime import datetime, timedelta from functools import reduce from pathlib import Path from textwrap import dedent from time import sleep from typing import TYPE_CHECKING from unittest import mock import pytest from git 
import Actor, Repo from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.commit_parser.conventional import ( ConventionalCommitParser, ConventionalCommitParserOptions, ) from semantic_release.commit_parser.emoji import EmojiCommitParser, EmojiParserOptions from semantic_release.commit_parser.scipy import ScipyCommitParser, ScipyParserOptions from semantic_release.hvcs.bitbucket import Bitbucket from semantic_release.hvcs.gitea import Gitea from semantic_release.hvcs.github import Github from semantic_release.hvcs.gitlab import Gitlab from semantic_release.version.version import Version import tests.conftest import tests.const import tests.util from tests.const import ( COMMIT_MESSAGE, DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, NULL_HEX_SHA, RepoActionStep, ) from tests.util import ( add_text_to_file, copy_dir_tree, temporary_working_directory, ) if TYPE_CHECKING: from typing import Any, Generator, Literal, Protocol, Sequence, TypedDict, Union from tests.fixtures.example_project import UpdateVersionPyFileFn try: # Python 3.8 and 3.9 compatibility from typing_extensions import TypeAlias except ImportError: from typing import TypeAlias # type: ignore[attr-defined, no-redef] from typing_extensions import NotRequired from semantic_release.hvcs import HvcsBase from tests.conftest import ( BuildRepoOrCopyCacheFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, GetWheelFileFn, UpdatePyprojectTomlFn, UseCustomParserFn, UseHvcsFn, UseParserFn, ) CommitConvention = Literal["conventional", "emoji", "scipy"] VersionStr = str CommitMsg = str DatetimeISOStr = str ChangelogTypeHeading = str TomlSerializableTypes = Union[dict, set, list, tuple, int, float, bool, str] class RepoVersionDef(TypedDict): """ A reduced common repo definition, that is specific to a type of commit conventions Used for builder functions that only need to know about a single commit 
class CommitDef(TypedDict):
    """Normalized description of a single commit used when building test repos."""

    # Full commit message passed to `git commit -m`
    msg: CommitMsg
    # Parsed commit type (falls back to "unknown" for unparseable messages)
    type: str
    # Changelog section heading the commit is grouped under
    category: str
    # Commit description text (paragraphs joined by blank lines)
    desc: str
    # Breaking-change description; empty string when there is none
    brking_desc: str
    # Parsed commit scope; empty string when there is none
    scope: str
    # Linked merge-request reference as parsed; empty string when there is none
    mr: str
    # Commit SHA; NULL_HEX_SHA placeholder until the commit is actually created
    sha: str
    # Optional fixed author/committer timestamp (ISO-8601 string)
    datetime: NotRequired[DatetimeISOStr]
    # Whether the commit is expected to appear in the generated changelog
    include_in_changelog: bool
class GetCommitsFromRepoBuildDefFn(Protocol): def __call__( self, build_definition: Sequence[RepoActions], filter_4_changelog: bool = False, ) -> RepoDefinition: ... RepoDefinition: TypeAlias = dict[VersionStr, RepoVersionDef] # type: ignore[misc] # mypy is thoroughly confused """ A Type alias to define a repositories versions, commits, and changelog sections for a specific commit convention """ class SimulateDefaultChangelogCreationFn(Protocol): def __call__( self, repo_definition: RepoDefinition, hvcs: Github | Gitlab | Gitea | Bitbucket, dest_file: Path | None = None, max_version: str | None = None, output_format: ChangelogOutputFormat = ChangelogOutputFormat.MARKDOWN, mask_initial_release: bool = False, ) -> str: ... class FormatGitSquashCommitMsgFn(Protocol): def __call__( self, squashed_commits: list[CommitDef], ) -> str: ... class FormatGitHubSquashCommitMsgFn(Protocol): def __call__( self, pr_title: str, pr_number: int, squashed_commits: list[CommitDef | str], ) -> str: ... class FormatBitBucketSquashCommitMsgFn(Protocol): def __call__( self, branch_name: str, pr_title: str, pr_number: int, squashed_commits: list[CommitDef], ) -> str: ... class FormatGitMergeCommitMsgFn(Protocol): def __call__(self, branch_name: str, tgt_branch_name: str) -> str: ... class FormatGitHubMergeCommitMsgFn(Protocol): def __call__(self, pr_number: int, branch_name: str) -> str: ... class FormatGitLabMergeCommitMsgFn(Protocol): def __call__( self, mr_title: str, mr_number: int, source_branch: str, target_branch: str, closed_issues: list[str], ) -> str: ... class CreateMergeCommitFn(Protocol): def __call__( self, git_repo: Repo, branch_name: str, commit_def: CommitDef, fast_forward: bool = True, ) -> CommitDef: ... class CreateSquashMergeCommitFn(Protocol): def __call__( self, git_repo: Repo, branch_name: str, commit_def: CommitDef, strategy_option: str = "theirs", ) -> CommitDef: ... 
class CommitSpec(TypedDict): conventional: str emoji: str scipy: str datetime: NotRequired[DatetimeISOStr] include_in_changelog: NotRequired[bool] class DetailsBase(TypedDict): pre_actions: NotRequired[Sequence[RepoActions]] post_actions: NotRequired[Sequence[RepoActions]] class RepoActionConfigure(TypedDict): action: Literal[RepoActionStep.CONFIGURE] details: RepoActionConfigureDetails class RepoActionConfigureDetails(DetailsBase): commit_type: CommitConvention hvcs_client_name: str hvcs_domain: str tag_format_str: str | None mask_initial_release: bool extra_configs: dict[str, TomlSerializableTypes] class RepoActionMakeCommits(TypedDict): action: Literal[RepoActionStep.MAKE_COMMITS] details: RepoActionMakeCommitsDetails class RepoActionMakeCommitsDetails(DetailsBase): commits: Sequence[CommitDef] class RepoActionRelease(TypedDict): action: Literal[RepoActionStep.RELEASE] details: RepoActionReleaseDetails class RepoActionReleaseDetails(DetailsBase): version: str datetime: DatetimeISOStr class RepoActionGitCheckout(TypedDict): action: Literal[RepoActionStep.GIT_CHECKOUT] details: RepoActionGitCheckoutDetails class RepoActionGitCheckoutDetails(DetailsBase): create_branch: NotRequired[RepoActionGitCheckoutCreateBranch] branch: NotRequired[str] class RepoActionGitCheckoutCreateBranch(TypedDict): name: str start_branch: str class RepoActionGitSquash(TypedDict): action: Literal[RepoActionStep.GIT_SQUASH] details: RepoActionGitSquashDetails class RepoActionGitSquashDetails(DetailsBase): branch: str strategy_option: str commit_def: CommitDef class RepoActionGitMerge(TypedDict): action: Literal[RepoActionStep.GIT_MERGE] details: RepoActionGitMergeDetails | RepoActionGitFFMergeDetails class RepoActionGitMergeDetails(DetailsBase): branch_name: str commit_def: CommitDef fast_forward: Literal[False] # strategy_option: str class RepoActionGitFFMergeDetails(DetailsBase): branch_name: str fast_forward: Literal[True] class RepoActionWriteChangelogs(TypedDict): action: 
Literal[RepoActionStep.WRITE_CHANGELOGS] details: RepoActionWriteChangelogsDetails class RepoActionWriteChangelogsDetails(DetailsBase): new_version: str max_version: NotRequired[str] dest_files: Sequence[RepoActionWriteChangelogsDestFile] class RepoActionWriteChangelogsDestFile(TypedDict): path: Path | str format: ChangelogOutputFormat class ConvertCommitSpecToCommitDefFn(Protocol): def __call__( self, commit_spec: CommitSpec, commit_type: CommitConvention ) -> CommitDef: ... class GetRepoDefinitionFn(Protocol): def __call__( self, commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: ... class BuildRepoFromDefinitionFn(Protocol): def __call__( self, dest_dir: Path | str, repo_construction_steps: Sequence[RepoActions], ) -> Sequence[RepoActions]: ... class BuiltRepoResult(TypedDict): definition: Sequence[RepoActions] repo: Repo class GetVersionsFromRepoBuildDefFn(Protocol): def __call__(self, repo_def: Sequence[RepoActions]) -> Sequence[str]: ... class ConvertCommitSpecsToCommitDefsFn(Protocol): def __call__( self, commits: Sequence[CommitSpec], commit_type: CommitConvention ) -> Sequence[CommitDef]: ... class BuildSpecificRepoFn(Protocol): def __call__( self, repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: ... RepoActions: TypeAlias = Union[ RepoActionConfigure, RepoActionMakeCommits, RepoActionRelease, RepoActionGitCheckout, RepoActionGitSquash, RepoActionWriteChangelogs, RepoActionGitMerge, ] class GetGitRepo4DirFn(Protocol): def __call__(self, directory: Path | str) -> Repo: ... class SplitRepoActionsByReleaseTagsFn(Protocol): def __call__( self, repo_definition: Sequence[RepoActions], tag_format_str: str ) -> dict[str, list[RepoActions]]: ... 
class GetCfgValueFromDefFn(Protocol): def __call__( self, build_definition: Sequence[RepoActions], key: str ) -> Any: ... class SeparateSquashedCommitDefFn(Protocol): def __call__(self, squashed_commit_def: CommitDef) -> list[CommitDef]: ... class GenerateDefaultReleaseNotesFromDefFn(Protocol): def __call__( self, version_actions: Sequence[RepoActions], hvcs: Github | Gitlab | Gitea | Bitbucket, previous_version: Version | None = None, license_name: str = "", dest_file: Path | None = None, mask_initial_release: bool = False, ) -> str: ... class GetHvcsClientFromRepoDefFn(Protocol): def __call__( self, repo_def: Sequence[RepoActions], ) -> Github | Gitlab | Gitea | Bitbucket: ... @pytest.fixture(scope="session") def deps_files_4_example_git_project( deps_files_4_example_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_example_git_project( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_example_git_project: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_example_git_project) @pytest.fixture(scope="session") def cached_example_git_project( build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_example_git_project: str, cached_example_project: Path, example_git_https_url: str, commit_author: Actor, ) -> Path: """ Initializes an example project with git repo. DO NOT USE DIRECTLY. Use a `repo_*` fixture instead. This creates a default base repository, all settings can be changed later through from the example_project_git_repo fixture's return object and manual adjustment. 
""" def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: if not cached_example_project.exists(): raise RuntimeError("Unable to find cached project files") # make a copy of the example project as a base copy_dir_tree(cached_example_project, cached_repo_path) # initialize git repo (open and close) # NOTE: We don't want to hold the repo object open for the entire test session, # the implementation on Windows holds some file descriptors open until close is called. with Repo.init(cached_repo_path) as repo: # Without this the global config may set it to "master", we want consistency repo.git.branch("-M", DEFAULT_BRANCH_NAME) with repo.config_writer("repository") as config: config.set_value("user", "name", commit_author.name) config.set_value("user", "email", commit_author.email) config.set_value("commit", "gpgsign", False) config.set_value("tag", "gpgsign", False) repo.create_remote(name="origin", url=example_git_https_url) # make sure all base files are in index to enable initial commit repo.index.add(("*", ".gitignore")) # This is a special build, we don't expose the Repo Actions to the caller return [] # End of _build_repo() return build_repo_or_copy_cache( repo_name=cached_example_git_project.__name__.split("_", maxsplit=1)[1], build_spec_hash=build_spec_hash_4_example_git_project, build_repo_func=_build_repo, ) @pytest.fixture(scope="session") def commit_author(): return Actor(name="semantic release testing", email="not_a_real@email.com") @pytest.fixture(scope="session") def default_tag_format_str() -> str: return "v{version}" @pytest.fixture(scope="session") def file_in_repo(): return "file.txt" @pytest.fixture(scope="session") def example_git_ssh_url(): return f"git@{EXAMPLE_HVCS_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" @pytest.fixture(scope="session") def example_git_https_url(): return f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" @pytest.fixture(scope="session") def get_commit_def_of_conventional_commit( 
@pytest.fixture(scope="session")
def get_commit_def_of_emoji_commit(
    default_emoji_parser: EmojiCommitParser,
) -> GetCommitDefFn:
    """Factory that turns a raw emoji-style commit message into a CommitDef."""

    def _get_commit_def_of_emoji_commit(msg: str) -> CommitDef:
        if not (parsed_result := default_emoji_parser.parse_message(msg)):
            # Unparseable message: return a placeholder definition that is
            # excluded from changelog generation.
            return {
                "msg": msg,
                "type": "unknown",
                "category": "Other",
                "desc": msg,
                "brking_desc": "",
                "scope": "",
                "mr": "",
                "sha": NULL_HEX_SHA,
                "include_in_changelog": False,
            }

        descriptions = list(parsed_result.descriptions)
        if parsed_result.linked_merge_request:
            # Drops everything after the last "(" in the subject line —
            # presumably stripping a trailing parenthesized MR reference
            # such as "(#123)"; TODO confirm against the parser's output.
            descriptions[0] = str.join("(", descriptions[0].split("(")[:-1]).strip()

        return {
            "msg": msg,
            "type": parsed_result.type,
            "category": parsed_result.category,
            "desc": str.join("\n\n", descriptions),
            "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions),
            "scope": parsed_result.scope,
            "mr": parsed_result.linked_merge_request,
            "sha": NULL_HEX_SHA,
            "include_in_changelog": True,
        }

    return _get_commit_def_of_emoji_commit
@pytest.fixture(scope="session")
def format_merge_commit_msg_git() -> FormatGitMergeCommitMsgFn:
    """Build the subject line git itself writes for a plain branch merge."""

    def _render(branch_name: str, tgt_branch_name: str) -> str:
        return "Merge branch '{0}' into '{1}'".format(branch_name, tgt_branch_name)

    return _render
"\n\n", filter( None, [ f"Merge branch '{source_branch}' into '{target_branch}'", f"{mr_title}", f"{issue_statement}", f"See merge request {reference}", ], ), ) return _format_merge_commit_msg @pytest.fixture(scope="session") def format_squash_commit_msg_git(commit_author: Actor) -> FormatGitSquashCommitMsgFn: def _format_squash_commit_msg_git( squashed_commits: list[CommitDef], ) -> str: return ( str.join( "\n\n", [ "Squashed commit of the following:", *[ str.join( "\n", [ f"commit {commit['sha']}", f"Author: {commit_author.name} <{commit_author.email}>", # TODO: get date from CommitDef object "Date: Day Mon DD HH:MM:SS YYYY +HHMM", "", *[f" {line}" for line in commit["msg"].split("\n")], ], ) for commit in squashed_commits ], ], ) + "\n" ) return _format_squash_commit_msg_git @pytest.fixture(scope="session") def format_squash_commit_msg_github() -> FormatGitHubSquashCommitMsgFn: def _format_squash_commit_msg_github( pr_title: str, pr_number: int, squashed_commits: list[CommitDef | str], ) -> str: sq_cmts: list[str] = ( squashed_commits # type: ignore[assignment] if len(squashed_commits) > 1 and not isinstance(squashed_commits[0], dict) else [commit["msg"] for commit in squashed_commits] # type: ignore[index] ) return ( str.join( "\n\n", [ f"{pr_title} (#{pr_number})", *[f"* {commit_str}" for commit_str in sq_cmts], ], ) + "\n" ) return _format_squash_commit_msg_github @pytest.fixture(scope="session") def format_squash_commit_msg_bitbucket() -> FormatBitBucketSquashCommitMsgFn: def _format_squash_commit_msg_bitbucket( branch_name: str, pr_title: str, pr_number: int, squashed_commits: list[CommitDef], ) -> str: # See #1085, for detail on BitBucket squash commit message format return ( str.join( "\n\n", [ f"Merged in {branch_name} (pull request #{pr_number})", f"{pr_title}", *[f"* {commit_str}" for commit_str in squashed_commits], ], ) + "\n" ) return _format_squash_commit_msg_bitbucket @pytest.fixture(scope="session") def create_merge_commit(stable_now_date: 
GetStableDateNowFn) -> CreateMergeCommitFn: def _create_merge_commit( git_repo: Repo, branch_name: str, commit_def: CommitDef, fast_forward: bool = True, ) -> CommitDef: curr_dt = stable_now_date() commit_dt = ( datetime.fromisoformat(commit_def["datetime"]) if "datetime" in commit_def else curr_dt ) timestamp = commit_dt.isoformat(timespec="seconds") if curr_dt == commit_dt: sleep(1) # ensure commit timestamps are unique with git_repo.git.custom_environment( GIT_AUTHOR_DATE=timestamp, GIT_COMMITTER_DATE=timestamp, ): git_repo.git.merge( branch_name, ff=fast_forward, no_ff=bool(not fast_forward), m=commit_def["msg"], ) # return the commit definition with the sha & message updated return { **commit_def, "msg": str(git_repo.head.commit.message).strip(), "sha": git_repo.head.commit.hexsha, } return _create_merge_commit @pytest.fixture(scope="session") def create_squash_merge_commit( stable_now_date: GetStableDateNowFn, ) -> CreateSquashMergeCommitFn: def _create_squash_merge_commit( git_repo: Repo, branch_name: str, commit_def: CommitDef, strategy_option: str = "theirs", ) -> CommitDef: curr_dt = stable_now_date() commit_dt = ( datetime.fromisoformat(commit_def["datetime"]) if "datetime" in commit_def else curr_dt ) if curr_dt == commit_dt: sleep(1) # ensure commit timestamps are unique # merge --squash never commits on action, first it stages the changes git_repo.git.merge( branch_name, squash=True, strategy_option=strategy_option, ) # commit the squashed changes git_repo.git.commit( m=commit_def["msg"], date=commit_dt.isoformat(timespec="seconds"), ) # return the commit definition with the sha & message updated return { **commit_def, "msg": str(git_repo.head.commit.message).strip(), "sha": git_repo.head.commit.hexsha, } return _create_squash_merge_commit @pytest.fixture(scope="session") def create_release_tagged_commit( update_pyproject_toml: UpdatePyprojectTomlFn, update_version_py_file: UpdateVersionPyFileFn, default_tag_format_str: str, stable_now_date: 
GetStableDateNowFn, ) -> CreateReleaseFn: def _mimic_semantic_release_commit( git_repo: Repo, version: str, tag_format: str = default_tag_format_str, timestamp: DatetimeISOStr | None = None, ) -> None: curr_dt = stable_now_date() commit_dt = ( datetime.fromisoformat(timestamp) if isinstance(timestamp, str) else curr_dt ) if curr_dt == commit_dt: sleep(1) # ensure commit timestamps are unique # stamp version into version file update_version_py_file(version) # stamp version into pyproject.toml update_pyproject_toml("tool.poetry.version", version) # commit --all files with version number commit message git_repo.git.commit( a=True, m=COMMIT_MESSAGE.format(version=version), date=commit_dt.isoformat(timespec="seconds"), ) # ensure commit timestamps are unique (adding one second even though a nanosecond has gone by) commit_dt += timedelta(seconds=1) with git_repo.git.custom_environment( GIT_COMMITTER_DATE=commit_dt.isoformat(timespec="seconds"), ): # tag commit with version number tag_str = tag_format.format(version=version) git_repo.git.tag(tag_str, m=tag_str) return _mimic_semantic_release_commit @pytest.fixture(scope="session") def commit_n_rtn_changelog_entry( stable_now_date: GetStableDateNowFn, ) -> CommitNReturnChangelogEntryFn: def _commit_n_rtn_changelog_entry( git_repo: Repo, commit_def: CommitDef ) -> CommitDef: # make commit with --all files curr_dt = stable_now_date() commit_dt = ( datetime.fromisoformat(commit_def["datetime"]) if "datetime" in commit_def else curr_dt ) if curr_dt == commit_dt: sleep(1) # ensure commit timestamps are unique git_repo.git.commit( a=True, m=commit_def["msg"], date=commit_dt.isoformat(timespec="seconds"), ) # Capture the resulting commit message and sha return { **commit_def, "msg": str(git_repo.head.commit.message).strip(), "sha": git_repo.head.commit.hexsha, } return _commit_n_rtn_changelog_entry @pytest.fixture(scope="session") def simulate_change_commits_n_rtn_changelog_entry( commit_n_rtn_changelog_entry: 
CommitNReturnChangelogEntryFn, file_in_repo: str, ) -> SimulateChangeCommitsNReturnChangelogEntryFn: def _simulate_change_commits_n_rtn_changelog_entry( git_repo: Repo, commit_msgs: Sequence[CommitDef] ) -> Sequence[CommitDef]: changelog_entries = [] for commit_msg in commit_msgs: add_text_to_file(git_repo, file_in_repo) changelog_entries.append(commit_n_rtn_changelog_entry(git_repo, commit_msg)) return changelog_entries return _simulate_change_commits_n_rtn_changelog_entry @pytest.fixture(scope="session") def get_hvcs_client_from_repo_def( example_git_https_url: str, get_cfg_value_from_def: GetCfgValueFromDefFn, ) -> GetHvcsClientFromRepoDefFn: hvcs_client_classes = { Bitbucket.__name__.lower(): Bitbucket, Github.__name__.lower(): Github, Gitea.__name__.lower(): Gitea, Gitlab.__name__.lower(): Gitlab, } def _get_hvcs_client_from_repo_def( repo_def: Sequence[RepoActions], ) -> Github | Gitlab | Gitea | Bitbucket: hvcs_type = get_cfg_value_from_def(repo_def, "hvcs_client_name") hvcs_client_class = hvcs_client_classes[hvcs_type] # Prevent the HVCS client from using the environment variables with mock.patch.dict(os.environ, {}, clear=True): return hvcs_client_class( example_git_https_url, hvcs_domain=get_cfg_value_from_def(repo_def, "hvcs_domain"), ) return _get_hvcs_client_from_repo_def @pytest.fixture(scope="session") def build_configured_base_repo( # noqa: C901 cached_example_git_project: Path, use_github_hvcs: UseHvcsFn, use_gitlab_hvcs: UseHvcsFn, use_gitea_hvcs: UseHvcsFn, use_bitbucket_hvcs: UseHvcsFn, use_conventional_parser: UseParserFn, use_emoji_parser: UseParserFn, use_scipy_parser: UseParserFn, use_custom_parser: UseCustomParserFn, example_git_https_url: str, update_pyproject_toml: UpdatePyprojectTomlFn, get_wheel_file: GetWheelFileFn, ) -> BuildRepoFn: """ This fixture is intended to simplify repo scenario building by initially creating the repo but also configuring semantic_release in the pyproject.toml for when the test executes semantic_release. 
It returns a function so that derivative fixtures can call this fixture with individual parameters. """ def _build_configured_base_repo( # noqa: C901 dest_dir: Path | str, commit_type: str = "conventional", hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> tuple[Path, HvcsBase]: if not cached_example_git_project.exists(): raise RuntimeError("Unable to find cached git project files!") # Copy the cached git project the dest directory copy_dir_tree(cached_example_git_project, dest_dir) # Make sure we are in the dest directory with temporary_working_directory(dest_dir): # Set parser configuration if commit_type == "conventional": use_conventional_parser() elif commit_type == "emoji": use_emoji_parser() elif commit_type == "scipy": use_scipy_parser() else: use_custom_parser(commit_type) # Set HVCS configuration if hvcs_client_name == "github": hvcs_class = use_github_hvcs(hvcs_domain) elif hvcs_client_name == "gitlab": hvcs_class = use_gitlab_hvcs(hvcs_domain) elif hvcs_client_name == "gitea": hvcs_class = use_gitea_hvcs(hvcs_domain) elif hvcs_client_name == "bitbucket": hvcs_class = use_bitbucket_hvcs(hvcs_domain) else: raise ValueError(f"Unknown HVCS client name: {hvcs_client_name}") # Create HVCS Client instance hvcs = hvcs_class(example_git_https_url, hvcs_domain=hvcs_domain) # Set tag format in configuration if tag_format_str is not None: update_pyproject_toml( "tool.semantic_release.tag_format", tag_format_str ) # Set the build_command to create a wheel file (using the build_command_env version variable) build_result_file = ( get_wheel_file("$NEW_VERSION") if sys.platform != "win32" else get_wheel_file("$Env:NEW_VERSION") ) update_pyproject_toml( # NOTE: must work in both bash and Powershell "tool.semantic_release.build_command", # NOTE: we are trying to ensure a few non-file-path characters are removed, 
but this is not # the equivalent of a cononcial version translator, so it may not work in all cases dedent( f"""\ mkdir -p "{build_result_file.parent}" WHEEL_FILE="$(printf '%s' "{build_result_file}" | sed 's/+/./g')" touch "$WHEEL_FILE" """ if sys.platform != "win32" else f"""\ mkdir {build_result_file.parent} > $null $WHEEL_FILE = "{build_result_file}".Replace('+', '.') New-Item -ItemType file -Path "$WHEEL_FILE" -Force | Select-Object OriginalPath """ ), ) # Set whether or not the initial release should be masked update_pyproject_toml( "tool.semantic_release.changelog.default_templates.mask_initial_release", mask_initial_release, ) # Apply configurations to pyproject.toml if extra_configs is not None: for key, value in extra_configs.items(): update_pyproject_toml(key, value) return Path(dest_dir), hvcs return _build_configured_base_repo @pytest.fixture(scope="session") def separate_squashed_commit_def( default_conventional_parser: ConventionalCommitParser, default_emoji_parser: EmojiCommitParser, default_scipy_parser: ScipyCommitParser, ) -> SeparateSquashedCommitDefFn: message_parsers: dict[ CommitConvention, ConventionalCommitParser | EmojiCommitParser | ScipyCommitParser, ] = { "conventional": ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "parse_squash_commits": True, } ) ), "emoji": EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_squash_commits": True, } ) ), "scipy": ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "parse_squash_commits": True, } ) ), } def _separate_squashed_commit_def( squashed_commit_def: CommitDef, ) -> list[CommitDef]: commit_type: CommitConvention = "conventional" for parser_name, parser in message_parsers.items(): if squashed_commit_def["type"] in parser.options.allowed_tags: commit_type = parser_name parser = message_parsers[commit_type] if not hasattr(parser, 
"unsquash_commit_message"): return [squashed_commit_def] unsquashed_messages = parser.unsquash_commit_message( message=squashed_commit_def["msg"] ) return [ { "msg": squashed_message, "type": parsed_result.type, "category": parsed_result.category, "desc": str.join( "\n\n", ( [ # Strip out any MR references (since v9 doesn't) to prep for changelog generatro # TODO: remove in v10, as the parser will remove the MR reference str.join( "(", parsed_result.descriptions[0].split("(")[:-1] ).strip(), *parsed_result.descriptions[1:], ] if parsed_result.linked_merge_request else [*parsed_result.descriptions] ), ), "brking_desc": str.join("\n\n", parsed_result.breaking_descriptions), "scope": parsed_result.scope, "mr": parsed_result.linked_merge_request or squashed_commit_def["mr"], "sha": squashed_commit_def["sha"], "include_in_changelog": True, "datetime": squashed_commit_def.get("datetime", ""), } for parsed_result, squashed_message in iter( (parser.parse_message(squashed_msg), squashed_msg) for squashed_msg in unsquashed_messages ) if parsed_result is not None ] return _separate_squashed_commit_def @pytest.fixture(scope="session") def convert_commit_spec_to_commit_def( get_commit_def_of_conventional_commit: GetCommitDefFn, get_commit_def_of_emoji_commit: GetCommitDefFn, get_commit_def_of_scipy_commit: GetCommitDefFn, stable_now_date: datetime, ) -> ConvertCommitSpecToCommitDefFn: message_parsers: dict[CommitConvention, GetCommitDefFn] = { "conventional": get_commit_def_of_conventional_commit, "emoji": get_commit_def_of_emoji_commit, "scipy": get_commit_def_of_scipy_commit, } def _convert( commit_spec: CommitSpec, commit_type: CommitConvention, ) -> CommitDef: parse_msg_fn = message_parsers[commit_type] # Extract the correct commit message for the commit type return { **parse_msg_fn(commit_spec[commit_type]), "datetime": ( commit_spec["datetime"] if "datetime" in commit_spec else stable_now_date.isoformat(timespec="seconds") ), "include_in_changelog": 
(commit_spec.get("include_in_changelog", True)), } return _convert @pytest.fixture(scope="session") def convert_commit_specs_to_commit_defs( convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, ) -> ConvertCommitSpecsToCommitDefsFn: def _convert( commits: Sequence[CommitSpec], commit_type: CommitConvention, ) -> Sequence[CommitDef]: return [ convert_commit_spec_to_commit_def(commit, commit_type) for commit in commits ] return _convert @pytest.fixture(scope="session") def build_repo_from_definition( # noqa: C901, its required and its just test code build_configured_base_repo: BuildRepoFn, default_tag_format_str: str, create_release_tagged_commit: CreateReleaseFn, create_squash_merge_commit: CreateSquashMergeCommitFn, create_merge_commit: CreateMergeCommitFn, simulate_change_commits_n_rtn_changelog_entry: SimulateChangeCommitsNReturnChangelogEntryFn, simulate_default_changelog_creation: SimulateDefaultChangelogCreationFn, separate_squashed_commit_def: SeparateSquashedCommitDefFn, ) -> BuildRepoFromDefinitionFn: def expand_repo_construction_steps( acc: Sequence[RepoActions], step: RepoActions ) -> Sequence[RepoActions]: return [ *acc, *( reduce( expand_repo_construction_steps, # type: ignore[arg-type] step["details"]["pre_actions"], [], ) if "pre_actions" in step["details"] else [] ), step, *( reduce( expand_repo_construction_steps, # type: ignore[arg-type] step["details"]["post_actions"], [], ) if "post_actions" in step["details"] else [] ), ] def _build_repo_from_definition( # noqa: C901, its required and its just test code dest_dir: Path | str, repo_construction_steps: Sequence[RepoActions] ) -> Sequence[RepoActions]: completed_repo_steps: list[RepoActions] = [] expanded_repo_construction_steps: Sequence[RepoActions] = reduce( expand_repo_construction_steps, repo_construction_steps, [], ) repo_dir = Path(dest_dir) hvcs: Github | Gitlab | Gitea | Bitbucket tag_format_str: str mask_initial_release: bool = False current_commits: list[CommitDef] = [] 
current_repo_def: RepoDefinition = {} with temporary_working_directory(repo_dir): for step in expanded_repo_construction_steps: step_result = deepcopy(step) action = step["action"] if action == RepoActionStep.CONFIGURE: cfg_def: RepoActionConfigureDetails = step_result["details"] # type: ignore[assignment] # Make sure the resulting build definition is complete with the default tag_format_str = cfg_def["tag_format_str"] or default_tag_format_str cfg_def["tag_format_str"] = tag_format_str _, hvcs = build_configured_base_repo( # type: ignore[assignment] # TODO: fix the type error dest_dir, **{ key: cfg_def[key] # type: ignore[literal-required] for key in [ "commit_type", "hvcs_client_name", "hvcs_domain", "tag_format_str", "mask_initial_release", "extra_configs", ] }, ) # Save configuration details for later steps mask_initial_release = cfg_def["mask_initial_release"] # Make sure the resulting build definition is complete with the default cfg_def["tag_format_str"] = tag_format_str elif action == RepoActionStep.MAKE_COMMITS: mk_cmts_def: RepoActionMakeCommitsDetails = step_result["details"] # type: ignore[assignment] # update the commit definitions with the repo hashes with Repo(repo_dir) as git_repo: mk_cmts_def["commits"] = ( simulate_change_commits_n_rtn_changelog_entry( git_repo, mk_cmts_def["commits"], ) ) current_commits.extend( filter( lambda commit: commit["include_in_changelog"], mk_cmts_def["commits"], ) ) elif action == RepoActionStep.WRITE_CHANGELOGS: w_chlgs_def: RepoActionWriteChangelogsDetails = step["details"] # type: ignore[assignment] # Mark the repo definition with the latest stored commits for the upcoming release new_version = w_chlgs_def["new_version"] current_repo_def.update( {new_version: {"commits": [*current_commits]}} ) current_commits.clear() # Write each changelog with the current repo definition for changelog_file_def in w_chlgs_def["dest_files"]: simulate_default_changelog_creation( current_repo_def, hvcs=hvcs, 
dest_file=repo_dir.joinpath(changelog_file_def["path"]), output_format=changelog_file_def["format"], mask_initial_release=mask_initial_release, max_version=w_chlgs_def.get("max_version", None), ) elif action == RepoActionStep.RELEASE: release_def: RepoActionReleaseDetails = step["details"] # type: ignore[assignment] with Repo(repo_dir) as git_repo: create_release_tagged_commit( git_repo, version=release_def["version"], tag_format=tag_format_str, timestamp=release_def["datetime"], ) elif action == RepoActionStep.GIT_CHECKOUT: ckout_def: RepoActionGitCheckoutDetails = step["details"] # type: ignore[assignment] with Repo(repo_dir) as git_repo: if "create_branch" in ckout_def: create_branch_def: RepoActionGitCheckoutCreateBranch = ( ckout_def["create_branch"] ) start_head = git_repo.heads[ create_branch_def["start_branch"] ] new_branch_head = git_repo.create_head( create_branch_def["name"], commit=start_head.commit, ) new_branch_head.checkout() elif "branch" in ckout_def: git_repo.heads[ckout_def["branch"]].checkout() elif action == RepoActionStep.GIT_SQUASH: squash_def: RepoActionGitSquashDetails = step_result["details"] # type: ignore[assignment] # Update the commit definition with the repo hash with Repo(repo_dir) as git_repo: squash_def["commit_def"] = create_squash_merge_commit( git_repo=git_repo, branch_name=squash_def["branch"], commit_def=squash_def["commit_def"], strategy_option=squash_def["strategy_option"], ) if squash_def["commit_def"]["include_in_changelog"]: current_commits.extend( separate_squashed_commit_def( squashed_commit_def=squash_def["commit_def"], ) ) elif action == RepoActionStep.GIT_MERGE: this_step: RepoActionGitMerge = step_result # type: ignore[assignment] with Repo(repo_dir) as git_repo: if this_step["details"]["fast_forward"]: ff_merge_def: RepoActionGitFFMergeDetails = this_step[ # type: ignore[assignment] "details" ] git_repo.git.merge(ff_merge_def["branch_name"], ff=True) else: merge_def: RepoActionGitMergeDetails = this_step[ # type: 
ignore[assignment] "details" ] # Update the commit definition with the repo hash merge_def["commit_def"] = create_merge_commit( git_repo=git_repo, branch_name=merge_def["branch_name"], commit_def=merge_def["commit_def"], fast_forward=merge_def["fast_forward"], ) if merge_def["commit_def"]["include_in_changelog"]: current_commits.append(merge_def["commit_def"]) else: raise ValueError(f"Unknown action: {action}") completed_repo_steps.append(step_result) return completed_repo_steps return _build_repo_from_definition @pytest.fixture(scope="session") def get_cfg_value_from_def() -> GetCfgValueFromDefFn: def _get_cfg_value_from_def( build_definition: Sequence[RepoActions], key: str ) -> Any: configure_steps = [ step for step in build_definition if step["action"] == RepoActionStep.CONFIGURE ] for step in configure_steps[::-1]: if key in step["details"]: return step["details"][key] # type: ignore[literal-required] raise ValueError(f"Unable to find configuration key: {key}") return _get_cfg_value_from_def @pytest.fixture(scope="session") def get_versions_from_repo_build_def() -> GetVersionsFromRepoBuildDefFn: def _get_versions(repo_def: Sequence[RepoActions]) -> Sequence[str]: return [ step["details"]["version"] for step in repo_def if step["action"] == RepoActionStep.RELEASE ] return _get_versions @pytest.fixture(scope="session") def get_commits_from_repo_build_def() -> GetCommitsFromRepoBuildDefFn: def _get_commits( build_definition: Sequence[RepoActions], filter_4_changelog: bool = False, ) -> RepoDefinition: # Extract the commits from the build definition repo_def: RepoDefinition = {} commits: list[CommitDef] = [] for build_step in build_definition: if build_step["action"] == RepoActionStep.MAKE_COMMITS: commits_made = deepcopy(build_step["details"]["commits"]) if filter_4_changelog: commits_made = list( filter( lambda commit: commit["include_in_changelog"], commits_made ) ) commits.extend(commits_made) elif any( ( build_step["action"] == RepoActionStep.GIT_SQUASH, 
build_step["action"] == RepoActionStep.GIT_MERGE, ) ): if "commit_def" in build_step["details"]: commit_def = build_step["details"]["commit_def"] # type: ignore[typeddict-item] if filter_4_changelog and not commit_def["include_in_changelog"]: continue commits.append(commit_def) elif build_step["action"] == RepoActionStep.RELEASE: version = build_step["details"]["version"] repo_def[version] = {"commits": [*commits]} commits.clear() # Any remaining commits are considered unreleased if len(commits) > 0: repo_def["Unreleased"] = {"commits": [*commits]} return repo_def return _get_commits @pytest.fixture(scope="session") def split_repo_actions_by_release_tags( get_versions_from_repo_build_def: GetVersionsFromRepoBuildDefFn, ) -> SplitRepoActionsByReleaseTagsFn: def _split_repo_actions_by_release_tags( repo_definition: Sequence[RepoActions], tag_format_str: str, ) -> dict[str, list[RepoActions]]: releasetags_2_steps: dict[str, list[RepoActions]] = { "": [], } # Create generator for next release tags next_release_tag_gen = ( tag_format_str.format(version=version) for version in get_versions_from_repo_build_def(repo_definition) ) # initialize the first release tag curr_release_tag = next(next_release_tag_gen) releasetags_2_steps[curr_release_tag] = [] # Loop through all actions and split them by release tags for step in repo_definition: if step["action"] == RepoActionStep.CONFIGURE: releasetags_2_steps[""].append(step) continue if step["action"] == RepoActionStep.WRITE_CHANGELOGS: continue releasetags_2_steps[curr_release_tag].append(step) if step["action"] == RepoActionStep.RELEASE: try: curr_release_tag = next(next_release_tag_gen) releasetags_2_steps[curr_release_tag] = [] except StopIteration: curr_release_tag = "Unreleased" releasetags_2_steps[curr_release_tag] = [] # Run filter on any non-action steps of Unreleased releasetags_2_steps["Unreleased"] = list( filter( lambda step: step["action"] != RepoActionStep.GIT_CHECKOUT, releasetags_2_steps["Unreleased"], ) ) # 
Remove Unreleased if there are no steps in an Unreleased section if ( "Unreleased" in releasetags_2_steps and not releasetags_2_steps["Unreleased"] ): del releasetags_2_steps["Unreleased"] # Return all actions split up by release tags return releasetags_2_steps return _split_repo_actions_by_release_tags @pytest.fixture(scope="session") def simulate_default_changelog_creation( # noqa: C901 default_md_changelog_insertion_flag: str, default_rst_changelog_insertion_flag: str, today_date_str: str, ) -> SimulateDefaultChangelogCreationFn: def reduce_repo_def( acc: BaseAccumulatorVersionReduction, ver_2_def: tuple[str, RepoVersionDef] ) -> BaseAccumulatorVersionReduction: version_str, version_def = ver_2_def if Version.parse(version_str) <= acc["version_limit"]: acc["repo_def"][version_str] = version_def return acc def build_version_entry_markdown( version: VersionStr, version_def: RepoVersionDef, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: version_entry = [ f"## {version}\n" if version == "Unreleased" else f"## v{version} ({today_date_str})\n" ] changelog_sections = sorted( {commit["category"] for commit in version_def["commits"]} ) brking_descriptions = [] for section in changelog_sections: # Create Markdown section heading section_title = section.title() if not section.startswith(":") else section version_entry.append(f"### {section_title}\n") commits: list[CommitDef] = list( filter( lambda commit, section=section: ( # type: ignore[arg-type] commit["category"] == section ), version_def["commits"], ) ) section_bullets = [] # format each commit for commit_def in commits: descriptions = commit_def["desc"].split("\n\n") if commit_def["brking_desc"]: brking_descriptions.append( "- {commit_scope}{brk_desc}".format( commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), brk_desc=commit_def["brking_desc"].capitalize(), ) ) # NOTE: We have to be wary of the line length as the default changelog # has a 100 character limit or otherwise our tests 
will fail because the # URLs and whitespace don't line up subject_line = "- {commit_scope}{commit_desc}".format( commit_desc=descriptions[0].capitalize(), commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), ) mr_link = ( "" if not commit_def["mr"] else "([{mr}]({mr_url}),".format( mr=commit_def["mr"], mr_url=hvcs.pull_request_url(commit_def["mr"]), ) ) sha_link = "[`{short_sha}`]({commit_url}))".format( short_sha=commit_def["sha"][:7], commit_url=hvcs.commit_hash_url(commit_def["sha"]), ) # Add opening parenthesis if no MR link sha_link = sha_link if mr_link else f"({sha_link}" # NOTE: we are assuming that the subject line is always less than 100 characters commit_cl_desc = f"{subject_line} {mr_link}".rstrip() if len(commit_cl_desc) > 100: commit_cl_desc = f"{subject_line}\n {mr_link}".rstrip() if len(f"{commit_cl_desc} {sha_link}") > 100: commit_cl_desc = f"{commit_cl_desc}\n {sha_link}\n" else: commit_cl_desc = f"{commit_cl_desc} {sha_link}\n" if len(descriptions) > 1: commit_cl_desc += ( "\n" + str.join("\n\n", [*descriptions[1:]]) + "\n" ) # Add commits to section if commit_cl_desc not in section_bullets: section_bullets.append(commit_cl_desc) version_entry.extend(sorted(section_bullets)) # Add breaking changes to the end of the version entry if brking_descriptions: version_entry.append("### Breaking Changes\n") version_entry.extend([*sorted(brking_descriptions), ""]) return str.join("\n", version_entry) def build_version_entry_restructured_text( version: VersionStr, version_def: RepoVersionDef, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: version_entry = [ ( ".. _changelog-unreleased:" if version == "Unreleased" else f".. 
_changelog-v{version}:" ), "", ( f"{version}" if version == "Unreleased" else f"v{version} ({today_date_str})" ), ] version_entry.append("=" * len(version_entry[-1])) version_entry.append("") # Add newline changelog_sections = sorted( {commit["category"] for commit in version_def["commits"]} ) brking_descriptions = [] urls = [] for section in changelog_sections: # Create RestructuredText section heading section_title = section.title() if not section.startswith(":") else section version_entry.append(f"{section_title}") version_entry.append("-" * (len(version_entry[-1])) + "\n") # Filter commits by section commits: list[CommitDef] = list( filter( lambda commit, section=section: ( # type: ignore[arg-type] commit["category"] == section ), version_def["commits"], ) ) section_bullets = [] for commit_def in commits: descriptions = commit_def["desc"].split("\n\n") if commit_def["brking_desc"]: brking_descriptions.append( "* {commit_scope}{brk_desc}".format( commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), brk_desc=commit_def["brking_desc"].capitalize(), ) ) # NOTE: We have to be wary of the line length as the default changelog # has a 100 character limit or otherwise our tests will fail because the # URLs and whitespace don't line up subject_line = "* {commit_scope}{commit_desc}".format( commit_desc=descriptions[0].capitalize(), commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), ) mr_link = ( "" if not commit_def["mr"] else "(`{mr}`_,".format( mr=commit_def["mr"], ) ) sha_link = "`{short_sha}`_)".format( short_sha=commit_def["sha"][:7], ) # Add opening parenthesis if no MR link sha_link = sha_link if mr_link else f"({sha_link}" # NOTE: we are assuming that the subject line is always less than 100 characters commit_cl_desc = f"{subject_line} {mr_link}".rstrip() if len(commit_cl_desc) > 100: commit_cl_desc = f"{subject_line}\n {mr_link}".rstrip() if len(f"{commit_cl_desc} {sha_link}") > 100: commit_cl_desc = 
f"{commit_cl_desc}\n {sha_link}\n" else: commit_cl_desc = f"{commit_cl_desc} {sha_link}\n" if len(descriptions) > 1: commit_cl_desc += ( "\n" + str.join("\n\n", [*descriptions[1:]]) + "\n" ) # Add commits to section if commit_cl_desc not in section_bullets: section_bullets.append(commit_cl_desc) version_entry.extend(sorted(section_bullets)) urls.extend( [ *[ ".. _{mr}: {mr_url}".format( mr=commit_def["mr"], mr_url=hvcs.pull_request_url(commit_def["mr"]), ) for commit_def in commits if commit_def["mr"] ], *[ ".. _{short_sha}: {commit_url}".format( short_sha=commit_def["sha"][:7], commit_url=hvcs.commit_hash_url(commit_def["sha"]), ) for commit_def in commits ], ] ) # Add breaking changes to the end of the version entry if brking_descriptions: version_entry.append("Breaking Changes") version_entry.append("-" * len(version_entry[-1]) + "\n") version_entry.extend([*sorted(brking_descriptions), ""]) # Add commit URLs to the end of the version entry version_entry.extend(sorted(set(urls))) if version_entry[-1] == "": version_entry.pop() return str.join("\n", version_entry) + "\n" def build_version_entry( version: VersionStr, version_def: RepoVersionDef, output_format: ChangelogOutputFormat, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: output_functions = { ChangelogOutputFormat.MARKDOWN: build_version_entry_markdown, ChangelogOutputFormat.RESTRUCTURED_TEXT: build_version_entry_restructured_text, } return output_functions[output_format](version, version_def, hvcs) def build_initial_version_entry( version: VersionStr, version_def: RepoVersionDef, output_format: ChangelogOutputFormat, hvcs: Github | Gitlab | Gitea | Bitbucket, ) -> str: if output_format == ChangelogOutputFormat.MARKDOWN: return str.join( "\n", [ f"## v{version} ({today_date_str})", "", "- Initial Release", "", ], ) if output_format == ChangelogOutputFormat.RESTRUCTURED_TEXT: title = f"v{version} ({today_date_str})" return str.join( "\n", [ f".. 
_changelog-v{version}:", "", title, "=" * len(title), "", "* Initial Release", "", ], ) raise ValueError(f"Unknown output format: {output_format}") def _mimic_semantic_release_default_changelog( repo_definition: RepoDefinition, hvcs: Github | Gitlab | Gitea | Bitbucket, dest_file: Path | None = None, max_version: str | None = None, output_format: ChangelogOutputFormat = ChangelogOutputFormat.MARKDOWN, # TODO: Breaking v10, when default is toggled to true, also change this to True mask_initial_release: bool = False, ) -> str: if output_format == ChangelogOutputFormat.MARKDOWN: header = dedent( f"""\ # CHANGELOG {default_md_changelog_insertion_flag} """ ).rstrip() elif output_format == ChangelogOutputFormat.RESTRUCTURED_TEXT: universal_newline_insertion_flag = ( default_rst_changelog_insertion_flag.replace("\r", "") ) header = str.join( "\n\n", [ dedent( """\ .. _changelog: ========= CHANGELOG ========= """ ).rstrip(), universal_newline_insertion_flag, ], ) else: raise ValueError(f"Unknown output format: {output_format}") version_entries: list[str] = [] repo_def: RepoDefinition = ( repo_definition # type: ignore[assignment] if max_version is None else reduce( reduce_repo_def, # type: ignore[arg-type] repo_definition.items(), { "version_limit": Version.parse(max_version), "repo_def": {}, }, )["repo_def"] ) for i, (version, version_def) in enumerate(repo_def.items()): # prepend entries to force reverse ordering entry = ( build_initial_version_entry(version, version_def, output_format, hvcs) if i == 0 and mask_initial_release and version != "Unreleased" else build_version_entry(version, version_def, output_format, hvcs) ) version_entries.insert(0, entry) changelog_content = ( str.join( "\n" * 2, [header, str.join("\n" * 2, list(version_entries))] ).rstrip() + "\n" ) if dest_file is not None: # Converts uninversal newlines to the OS-specific upon write dest_file.write_text(changelog_content) return changelog_content return _mimic_semantic_release_default_changelog 
@pytest.fixture(scope="session") def generate_default_release_notes_from_def( # noqa: C901 today_date_str: str, get_commits_from_repo_build_def: GetCommitsFromRepoBuildDefFn, ) -> GenerateDefaultReleaseNotesFromDefFn: def build_version_entry_markdown( version: VersionStr, version_def: RepoVersionDef, hvcs: Github | Gitlab | Gitea | Bitbucket, license_name: str, ) -> str: version_entry = [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), ] changelog_sections = sorted( {commit["category"] for commit in version_def["commits"]} ) brking_descriptions = [] for section in changelog_sections: # Create Markdown section heading section_title = section.title() if not section.startswith(":") else section version_entry.append(f"### {section_title}\n") commits: list[CommitDef] = list( filter( lambda commit, section=section: ( # type: ignore[arg-type] commit["category"] == section ), version_def["commits"], ) ) section_bullets = [] # format each commit for commit_def in commits: descriptions = commit_def["desc"].split("\n\n") if commit_def["brking_desc"]: brking_descriptions.append( "- {commit_scope}{brk_desc}".format( commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), brk_desc=commit_def["brking_desc"].capitalize(), ) ) # NOTE: During release notes, we make the line length very large as the VCS # will handle the line wrapping for us so here we don't have to worry about it max_line_length = 1000 subject_line = "- {commit_scope}{commit_desc}".format( commit_desc=descriptions[0].capitalize(), commit_scope=( f"**{commit_def['scope']}**: " if commit_def["scope"] else "" ), ) mr_link = ( "" if not commit_def["mr"] else "([{mr}]({mr_url}),".format( mr=commit_def["mr"], mr_url=hvcs.pull_request_url(commit_def["mr"]), ) ) sha_link = "[`{short_sha}`]({commit_url}))".format( short_sha=commit_def["sha"][:7], commit_url=hvcs.commit_hash_url(commit_def["sha"]), ) 
# Add opening parenthesis if no MR link sha_link = sha_link if mr_link else f"({sha_link}" commit_cl_desc = f"{subject_line} {mr_link}".rstrip() if len(commit_cl_desc) > max_line_length: commit_cl_desc = f"{subject_line}\n {mr_link}".rstrip() if len(f"{commit_cl_desc} {sha_link}") > max_line_length: commit_cl_desc = f"{commit_cl_desc}\n {sha_link}\n" else: commit_cl_desc = f"{commit_cl_desc} {sha_link}\n" # NOTE: remove this when we no longer are writing the whole commit msg (squash commits enabled) # if len(descriptions) > 1: # commit_cl_desc += ( # "\n" + str.join("\n\n", [*descriptions[1:]]) + "\n" # ) # Add commits to section section_bullets.append(commit_cl_desc) version_entry.extend(sorted(section_bullets)) # Add breaking changes to the end of the version entry if brking_descriptions: version_entry.append("### Breaking Changes\n") version_entry.extend([*sorted(brking_descriptions), ""]) return str.join("\n", version_entry) def build_initial_version_entry_markdown( version: VersionStr, license_name: str = "", ) -> str: return str.join( "\n", [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "- Initial Release", "", ], ) def _generate_default_release_notes( version_actions: Sequence[RepoActions], hvcs: Github | Gitlab | Gitea | Bitbucket, previous_version: Version | None = None, license_name: str = "", dest_file: Path | None = None, # TODO: Breaking v10, when default is toggled to true, also change this to True mask_initial_release: bool = False, ) -> str: limited_repo_def: RepoDefinition = get_commits_from_repo_build_def( build_definition=version_actions, filter_4_changelog=True, ) version: Version = Version.parse(next(iter(limited_repo_def.keys()))) version_def: RepoVersionDef = limited_repo_def[str(version)] release_notes_content = ( str.join( "\n" * 2, [ ( build_initial_version_entry_markdown(str(version), license_name) if mask_initial_release and not 
previous_version else build_version_entry_markdown( str(version), version_def, hvcs, license_name ) ).rstrip(), *( [ "---", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=previous_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( previous_version.as_tag(), version.as_tag() ), ), ] if previous_version and not isinstance(hvcs, Gitea) else [] ), ], ).rstrip() + "\n" ) if dest_file is not None: # Converts universal newlines to the OS-specific upon write dest_file.write_text(release_notes_content) # match the line endings of the current OS return ( str.join(os.linesep, release_notes_content.splitlines(keepends=False)) + os.linesep ) return _generate_default_release_notes @pytest.fixture def git_repo_for_directory() -> Generator[GetGitRepo4DirFn, None, None]: repos: list[Repo] = [] # Must be a callable function to ensure files exist before repo is opened def _git_repo_4_dir(directory: Path | str) -> Repo: if not Path(directory).exists(): raise RuntimeError("Unable to find git project!") repo = Repo(directory) repos.append(repo) return repo try: yield _git_repo_4_dir finally: for repo in repos: repo.close() @pytest.fixture def example_project_git_repo( example_project_dir: ExProjectDir, git_repo_for_directory: GetGitRepo4DirFn, ) -> ExProjectGitRepoFn: def _example_project_git_repo() -> Repo: return git_repo_for_directory(example_project_dir) return _example_project_git_repo python-semantic-release-9.21.0/tests/fixtures/repos/000077500000000000000000000000001475670435200225235ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/fixtures/repos/__init__.py000066400000000000000000000003051475670435200246320ustar00rootroot00000000000000from tests.fixtures.repos.git_flow import * from tests.fixtures.repos.github_flow import * from tests.fixtures.repos.repo_initial_commit import * from tests.fixtures.repos.trunk_based_dev import * 
python-semantic-release-9.21.0/tests/fixtures/repos/git_flow/000077500000000000000000000000001475670435200243355ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/fixtures/repos/git_flow/__init__.py000066400000000000000000000004271475670435200264510ustar00rootroot00000000000000from tests.fixtures.repos.git_flow.repo_w_1_release_channel import * from tests.fixtures.repos.git_flow.repo_w_2_release_channels import * from tests.fixtures.repos.git_flow.repo_w_3_release_channels import * from tests.fixtures.repos.git_flow.repo_w_4_release_channels import * python-semantic-release-9.21.0/tests/fixtures/repos/git_flow/repo_w_1_release_channel.py000066400000000000000000001023621475670435200316160ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitMerge, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" FIX_BRANCH_2_NAME = "fix/patch-2" @pytest.fixture(scope="session") def 
deps_files_4_git_flow_repo_w_1_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_1_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_1_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_1_release_channels) @pytest.fixture(scope="session") def get_repo_definition_4_git_flow_repo_w_1_release_channels( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the actions needed to build a git repo that uses the git flow branching strategy and git merge commits with a single release channel 1. 
official (production) releases (x.x.x) """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) # Common static actions or components changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": True, }, }, ] merge_dev_into_main: RepoActionGitMerge = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, } # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, 
"mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": True, "tool.semantic_release.major_on_zero": True, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "0.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), 
"include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add a feature and officially release it new_version = "0.2.0" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add a new feature", "emoji": ":sparkles: add a new feature", "scipy": "ENH: add a new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": 
[ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add a breaking change feature and officially release it new_version = "1.0.0" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_3_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_3_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": 
new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and officially release new_version = "1.0.1" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct a bug", "emoji": ":bug: correct a bug", "scipy": "BUG: correct a bug", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and Add multiple feature changes before officially releasing new_version = "1.1.0" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_2_NAME, 
"start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct another bug", "emoji": ":bug: correct another bug", "scipy": "BUG: correct another bug", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_4_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat(cli): add new config cli command", "emoji": ":sparkles: (cli) add new config cli command", "scipy": "ENH(cli): add new config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_4_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_4_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_4_NAME, 
tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_4_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_git_flow_repo_w_1_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_1_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_1_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_1_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_1_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built 
directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_conventional_commits( build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_1_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_emoji_commits( build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_1_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_scipy_commits( build_git_flow_repo_w_1_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_1_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/git_flow/repo_w_2_release_channels.py000066400000000000000000001055661475670435200320130ustar00rootroot00000000000000from __future__ 
import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitMerge, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" @pytest.fixture(scope="session") def deps_files_4_git_flow_repo_w_2_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_2_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_2_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_2_release_channels) @pytest.fixture(scope="session") def 
get_repo_definition_4_git_flow_repo_w_2_release_channels( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the actions needed to build a git repo that uses the git flow branching strategy and git merge commits with 2 release channels 1. alpha feature releases (x.x.x-alpha.x) 2. official (production) releases (x.x.x) """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) # Common static actions or components changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": True, }, }, ] merge_dev_into_main: RepoActionGitMerge = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( 
branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, } # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, # branch "feature" has prerelease suffix of "alpha" "tool.semantic_release.branches.features": { "match": r"^feat/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.allow_zero_version": True, "tool.semantic_release.major_on_zero": True, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "0.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add new feature", 
"emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = "0.2.0-alpha.1" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add a new feature", "emoji": ":sparkles: add a new feature", "scipy": "ENH: add a new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { 
"action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = "1.0.0-alpha.1" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add another feature and officially release new_version = "1.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add some more text", "emoji": ":sparkles: add some more text", "scipy": "ENH: add some more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": 
format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add another feature and officially release (no intermediate alpha release) new_version = "1.1.0" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_3_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat(cli): add new config cli command", "emoji": ":sparkles: (cli) add new config cli command", "scipy": "ENH(cli): add new config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_3_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": 
RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and officially release new_version = "1.1.1" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix(config): fixed configuration generation", "emoji": ":bug: (config) fixed configuration generation", "scipy": "MAINT(config): fixed configuration generation", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": 
RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Introduce a new feature and create a prerelease for it new_version = "1.2.0-alpha.1" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_4_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add some more text", "emoji": ":sparkles: add some more text", "scipy": "ENH: add some more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Fix the previous alpha & add additional feature and create a subsequent prerelease for it new_version = "1.2.0-alpha.2" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix(scope): correct some text", "emoji": ":bug: (scope) correct some text", "scipy": "MAINT(scope): correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "conventional": "feat(scope): add some more text", "emoji": ":sparkles:(scope) add some more text", "scipy": "ENH(scope): add some more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": 
changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_git_flow_repo_w_2_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_2_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_2_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_2_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_2_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_w_alpha_prereleases_n_conventional_commits( build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": 
build_git_flow_repo_w_2_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_alpha_prereleases_n_emoji_commits( build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_alpha_prereleases_n_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_2_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_alpha_prereleases_n_scipy_commits( build_git_flow_repo_w_2_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_alpha_prereleases_n_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_2_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/git_flow/repo_w_3_release_channels.py000066400000000000000000001147151475670435200320100ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from 
tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitMerge, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" FIX_BRANCH_2_NAME = "fix/patch-2" @pytest.fixture(scope="session") def deps_files_4_git_flow_repo_w_3_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_3_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_3_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_3_release_channels) @pytest.fixture(scope="session") def get_repo_definition_4_git_flow_repo_w_3_release_channels( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the 
actions needed to build a git repo that uses the git flow branching strategy and git merge commits with 2 release channels 1. alpha feature releases (x.x.x-alpha.x) 2. release candidate releases (x.x.x-rc.x) 3. official (production) releases (x.x.x) """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) # Common static actions or components changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": True, }, }, ] merge_dev_into_main: RepoActionGitMerge = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, } # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] 
repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, # branch "dev" has prerelease suffix of "rc" "tool.semantic_release.branches.dev": { "match": r"^dev$", "prerelease": True, "prerelease_token": "rc", }, # branch "feature" has prerelease suffix of "alpha" "tool.semantic_release.branches.features": { "match": r"^feat/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.allow_zero_version": True, "tool.semantic_release.major_on_zero": True, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "0.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, 
}, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = "0.2.0-alpha.1" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add a new feature", "emoji": ":sparkles: add a new feature", "scipy": "ENH: add a new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a breaking feature change and release it as an alpha release new_version = "1.0.0-alpha.1" 
repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge in the successful alpha release and create a release candidate new_version = "1.0.0-rc.1" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # officially release the sucessful release 
candidate to production new_version = "1.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add a feature and release it as an alpha release new_version = "1.1.0-alpha.1" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_3_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat(cli): add new config cli command", "emoji": ":sparkles: (cli) add new config cli command", "scipy": "ENH(cli): add new config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add another feature and release it as subsequent alpha release new_version = "1.1.0-alpha.2" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat(config): add new config option", "emoji": ":sparkles: (config) add new config option", "scipy": "ENH(config): add new config option", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": 
next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge in the successful alpha release, add a fix, and create a release candidate new_version = "1.1.0-rc.1" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_3_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_3_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix(cli): fix config cli command", "emoji": ":bug: (cli) fix config cli command", "scipy": "BUG(cli): fix config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": 
format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # fix another bug from the release candidate and create a new release candidate new_version = "1.1.0-rc.2" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix(config): fix config option", "emoji": ":bug: (config) fix config option", "scipy": "BUG(config): fix config option", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { 
"new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # officially release the sucessful release candidate to production new_version = "1.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_dev_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_git_flow_repo_w_3_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_3_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_3_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_3_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_3_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # 
--------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_3_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_3_release_channels: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_3_release_channels( commit_type="conventional", tag_format_str="submod-v{version}", ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits_using_tag_format.__name__ build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_3_release_channels, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits( build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_3_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, 
), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits( build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_3_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits( build_git_flow_repo_w_3_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_git_flow_w_rc_n_alpha_prereleases_n_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_git_flow_repo_w_3_release_channels( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/git_flow/repo_w_4_release_channels.py000066400000000000000000001052321475670435200320030ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from 
tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitMergeCommitMsgFn, GetRepoDefinitionFn, RepoActionGitMerge, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) BETA_BRANCH_NAME = "beta" DEV_BRANCH_NAME = "dev" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" FEAT_BRANCH_3_NAME = "feat/feature-3" FEAT_BRANCH_4_NAME = "feat/feature-4" FIX_BRANCH_1_NAME = "fix/patch-1" FIX_BRANCH_2_NAME = "fix/patch-2" @pytest.fixture(scope="session") def deps_files_4_git_flow_repo_w_4_release_channels( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_git_flow_repo_w_4_release_channels( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_git_flow_repo_w_4_release_channels: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_git_flow_repo_w_4_release_channels) @pytest.fixture(scope="session") def get_repo_definition_4_git_flow_repo_w_4_release_channels( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_git: FormatGitMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ This fixture returns a function that when called will define the actions needed to build a git repo that uses the git flow branching 
strategy and git merge commits with 4 release channels. This very complex repository mirrors the git flow example provided by a user in issue [#789](https://github.com/python-semantic-release/python-semantic-release/issues/789). 1. [feature branches] revision releases which include build-metadata of the branch name (slightly differs from user where the release also used alpha+build-metadata) 2. [dev branch] alpha feature releases (x.x.x-alpha.x) 3. [beta branch] beta releases (x.x.x-beta.x) 4. [main branch] official (production) releases (x.x.x) """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) # Common static actions or components changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] fast_forward_dev_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": BETA_BRANCH_NAME, "fast_forward": True, }, }, ] fast_forward_beta_branch_actions: Sequence[RepoActions] = [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEFAULT_BRANCH_NAME, "fast_forward": True, }, }, ] merge_dev_into_beta: RepoActionGitMerge = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": DEV_BRANCH_NAME, "fast_forward": False, "commit_def": 
convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=BETA_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=BETA_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=DEV_BRANCH_NAME, tgt_branch_name=BETA_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, } merge_beta_into_main: RepoActionGitMerge = { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": BETA_BRANCH_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=BETA_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=BETA_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=BETA_BRANCH_NAME, tgt_branch_name=DEFAULT_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, } # Define All the steps required to create the repository repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": rf"^{DEFAULT_BRANCH_NAME}$", "prerelease": False, }, # branch "beta" has prerelease suffix of "beta" "tool.semantic_release.branches.beta": { "match": rf"^{BETA_BRANCH_NAME}$", "prerelease": True, "prerelease_token": "beta", }, # branch "development" has prerelease suffix of "alpha" "tool.semantic_release.branches.dev": { "match": rf"^{DEV_BRANCH_NAME}$", "prerelease": True, "prerelease_token": "alpha", }, # 
branch "feat/*" has prerelease suffix of "rev" "tool.semantic_release.branches.features": { "match": r"^feat/.+", "prerelease": True, "prerelease_token": "rev", }, "tool.semantic_release.allow_zero_version": False, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "1.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": [ # only one commit to start the main branch convert_commit_spec_to_commit_def( { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, commit_type, ), ], }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": BETA_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": DEV_BRANCH_NAME, "start_branch": BETA_BRANCH_NAME, }, }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_1_NAME, 
tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **merge_dev_into_beta, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_beta_into_main, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and release it as an alpha release new_version = "1.0.1-alpha.1" repo_construction_steps.extend( [ *fast_forward_beta_branch_actions, *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix(cli): fix config cli command", "emoji": ":bug: (cli) fix config cli command", "scipy": "BUG(cli): fix config cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FIX_BRANCH_1_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, 
commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge in the successful alpha release and create a beta release new_version = "1.0.1-beta.1" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **merge_dev_into_beta, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Fix a bug found in beta release and create a new alpha release new_version = "1.0.1-alpha.2" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix(config): fix config option", "emoji": ":bug: (config) fix config option", "scipy": "BUG(config): fix config option", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FIX_BRANCH_2_NAME, 
tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge in the 2nd successful alpha release and create a secondary beta release new_version = "1.0.1-beta.2" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **merge_dev_into_beta, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Add a new feature (another developer was working on) and create a release for it # Based on Semver standard, Build metadata is restricted to [A-Za-z0-9-] so we replace the '/' with a '-' new_version = f"""1.1.0-rev.1+{FEAT_BRANCH_2_NAME.replace("/", '-')}""" repo_construction_steps.extend( [ *fast_forward_dev_branch_actions, { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_2_NAME, "start_branch": DEV_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat(feat-2): add another primary feature", "emoji": ":sparkles: (feat-2) add another primary feature", "scipy": "ENH(feat-2): add another primary feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { 
"new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge in the successful revision release and create an alpha release new_version = "1.1.0-alpha.1" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEV_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_2_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "emoji": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "scipy": format_merge_commit_msg_git( branch_name=FEAT_BRANCH_2_NAME, tgt_branch_name=DEV_BRANCH_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge in the successful alpha release and create a beta release new_version = "1.1.0-beta.1" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": BETA_BRANCH_NAME}, }, { **merge_dev_into_beta, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # officially release the sucessful release candidate to production new_version = "1.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { **merge_beta_into_main, }, { "action": RepoActionStep.RELEASE, "details": { 
"version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_git_flow_repo_w_4_release_channels( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_git_flow_repo_w_4_release_channels: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_git_flow_repo_w_4_release_channels: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_git_flow_repo_w_4_release_channels( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_git_flow_repo_w_4_release_channels, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits( build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( 
        repo_w_git_flow_w_beta_alpha_rev_prereleases_n_conventional_commits.__name__
    )
    commit_type: CommitConvention = repo_name.split("_")[-2]  # type: ignore[assignment]
    return {
        "definition": build_git_flow_repo_w_4_release_channels(
            repo_name=repo_name,
            commit_type=commit_type,
            dest_dir=example_project_dir,
        ),
        "repo": example_project_git_repo(),
    }


@pytest.fixture
def repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits(
    build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn,
    example_project_git_repo: ExProjectGitRepoFn,
    example_project_dir: ExProjectDir,
    change_to_ex_proj_dir: None,  # requested only for its side effect (changes cwd)
) -> BuiltRepoResult:
    """Test repo: git-flow with 4 release channels, gitmoji commit messages."""
    repo_name = repo_w_git_flow_w_beta_alpha_rev_prereleases_n_emoji_commits.__name__
    # Commit convention is encoded as the 2nd-to-last word of the fixture name.
    commit_type: CommitConvention = repo_name.split("_")[-2]  # type: ignore[assignment]
    return {
        "definition": build_git_flow_repo_w_4_release_channels(
            repo_name=repo_name,
            commit_type=commit_type,
            dest_dir=example_project_dir,
        ),
        "repo": example_project_git_repo(),
    }


@pytest.fixture
def repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits(
    build_git_flow_repo_w_4_release_channels: BuildSpecificRepoFn,
    example_project_git_repo: ExProjectGitRepoFn,
    example_project_dir: ExProjectDir,
    change_to_ex_proj_dir: None,  # requested only for its side effect (changes cwd)
) -> BuiltRepoResult:
    """Test repo: git-flow with 4 release channels, scipy-style commit messages."""
    repo_name = repo_w_git_flow_w_beta_alpha_rev_prereleases_n_scipy_commits.__name__
    # Commit convention is encoded as the 2nd-to-last word of the fixture name.
    commit_type: CommitConvention = repo_name.split("_")[-2]  # type: ignore[assignment]
    return {
        "definition": build_git_flow_repo_w_4_release_channels(
            repo_name=repo_name,
            commit_type=commit_type,
            dest_dir=example_project_dir,
        ),
        "repo": example_project_git_repo(),
    }
python-semantic-release-9.21.0/tests/fixtures/repos/github_flow/000077500000000000000000000000001475670435200250345ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/fixtures/repos/github_flow/__init__.py000066400000000000000000000002151475670435200271430ustar00rootroot00000000000000from tests.fixtures.repos.github_flow.repo_w_default_release import *
from
tests.fixtures.repos.github_flow.repo_w_release_channels import *
python-semantic-release-9.21.0/tests/fixtures/repos/github_flow/repo_w_default_release.py000066400000000000000000000436101475670435200321110ustar00rootroot00000000000000from __future__ import annotations

from datetime import timedelta
from itertools import count
from pathlib import Path
from typing import TYPE_CHECKING

import pytest

from semantic_release.cli.config import ChangelogOutputFormat

import tests.conftest
import tests.const
import tests.util
from tests.const import (
    DEFAULT_BRANCH_NAME,
    EXAMPLE_HVCS_DOMAIN,
    INITIAL_COMMIT_MESSAGE,
    RepoActionStep,
)

if TYPE_CHECKING:
    from typing import Sequence

    from tests.conftest import (
        GetCachedRepoDataFn,
        GetMd5ForSetOfFilesFn,
        GetStableDateNowFn,
    )
    from tests.fixtures.example_project import ExProjectDir
    from tests.fixtures.git_repo import (
        BuildRepoFromDefinitionFn,
        BuildRepoOrCopyCacheFn,
        BuildSpecificRepoFn,
        BuiltRepoResult,
        CommitConvention,
        CommitSpec,
        ConvertCommitSpecsToCommitDefsFn,
        ConvertCommitSpecToCommitDefFn,
        ExProjectGitRepoFn,
        FormatGitHubSquashCommitMsgFn,
        GetRepoDefinitionFn,
        RepoActions,
        RepoActionWriteChangelogsDestFile,
        TomlSerializableTypes,
    )

# Branch names used while constructing the GitHub Flow example repository.
FIX_BRANCH_1_NAME = "fix/patch-1"
FEAT_BRANCH_1_NAME = "feat/feature-1"


@pytest.fixture(scope="session")
def deps_files_4_github_flow_repo_w_default_release_channel(
    deps_files_4_example_git_project: list[Path],
) -> list[Path]:
    """Files whose contents decide when the cached repo build must be rebuilt."""
    return [
        *deps_files_4_example_git_project,
        # This file
        Path(__file__).absolute(),
        # because of imports
        Path(tests.const.__file__).absolute(),
        Path(tests.util.__file__).absolute(),
        # because of the fixtures
        Path(tests.conftest.__file__).absolute(),
    ]


@pytest.fixture(scope="session")
def build_spec_hash_4_github_flow_repo_w_default_release_channel(
    get_md5_for_set_of_files: GetMd5ForSetOfFilesFn,
    deps_files_4_github_flow_repo_w_default_release_channel: list[Path],
) -> str:
    # Generates a hash of the build spec to set when to invalidate the cache
    return
get_md5_for_set_of_files( deps_files_4_github_flow_repo_w_default_release_channel ) @pytest.fixture(scope="session") def get_repo_definition_4_github_flow_repo_w_default_release_channel( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_squash_commit_msg_github: FormatGitHubSquashCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ Builds a repository with the GitHub Flow branching strategy and a squash commit merging strategy for a single release channel on the default branch. """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) pr_num_gen = (i for i in count(start=2, step=1)) changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": False, "tool.semantic_release.commit_parser_options.parse_squash_commits": True, **(extra_configs 
or {}), }, }, } ) new_version = "1.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) fix_branch_1_commits: Sequence[CommitSpec] = [ { "conventional": "fix(cli): add missing text", "emoji": ":bug: add missing text", "scipy": "MAINT: add missing text", "datetime": next(commit_timestamp_gen), }, ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in fix_branch_1_commits ], commit_type, ), }, }, ] ) # simulate separate work by another person at same time as the fix branch feat_branch_1_commits: Sequence[CommitSpec] = [ { "conventional": "feat(cli): add cli interface", "emoji": ":sparkles: add cli interface", "scipy": "ENH: add cli interface", "datetime": next(commit_timestamp_gen), }, { "conventional": "test(cli): add cli tests", "emoji": ":checkmark: add cli tests", "scipy": "TST: add cli tests", "datetime": next(commit_timestamp_gen), }, { "conventional": "docs(cli): add cli documentation", "emoji": ":memo: 
add cli documentation", "scipy": "DOC: add cli documentation", "datetime": next(commit_timestamp_gen), }, ] repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { **commit, "include_in_changelog": False, } for commit in feat_branch_1_commits ], commit_type, ) }, }, ] ) new_version = "1.0.1" all_commit_types: list[CommitConvention] = ["conventional", "emoji", "scipy"] fix_branch_pr_number = next(pr_num_gen) fix_branch_squash_commit_spec: CommitSpec = { **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=fix_branch_1_commits[0][cmt_type], pr_number=fix_branch_pr_number, # No squashed commits since there is only one commit squashed_commits=[], ) for cmt_type in all_commit_types }, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": FIX_BRANCH_1_NAME, "strategy_option": "theirs", "commit_def": convert_commit_spec_to_commit_def( fix_branch_squash_commit_spec, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) feat_branch_pr_number = next(pr_num_gen) feat_branch_squash_commit_spec: CommitSpec = { **{ # type: ignore[typeddict-item] cmt_type: format_squash_commit_msg_github( # Use the primary commit message as the PR title pr_title=feat_branch_1_commits[0][cmt_type], pr_number=feat_branch_pr_number, squashed_commits=[ 
cmt[commit_type] for cmt in feat_branch_1_commits ], ) for cmt_type in all_commit_types }, "datetime": next(commit_timestamp_gen), "include_in_changelog": True, } new_version = "1.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_SQUASH, "details": { "branch": FEAT_BRANCH_1_NAME, "strategy_option": "theirs", "commit_def": convert_commit_spec_to_commit_def( feat_branch_squash_commit_spec, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_repo_w_github_flow_w_default_release_channel( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_github_flow_repo_w_default_release_channel: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_github_flow_repo_w_default_release_channel: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_github_flow_repo_w_default_release_channel( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_github_flow_repo_w_default_release_channel, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # 
# --------------------------------------------------------------------------- #
# Test-level fixtures that will cache the built directory & set up test case  #
# --------------------------------------------------------------------------- #


@pytest.fixture
def repo_w_github_flow_w_default_release_channel_conventional_commits(
    build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn,
    example_project_git_repo: ExProjectGitRepoFn,
    example_project_dir: ExProjectDir,
    change_to_ex_proj_dir: None,  # requested only for its side effect (changes cwd)
) -> BuiltRepoResult:
    """GitHub Flow repo, single default-branch release channel, conventional commits."""
    repo_name = (
        repo_w_github_flow_w_default_release_channel_conventional_commits.__name__
    )
    # Commit convention is encoded as the 2nd-to-last word of the fixture name.
    commit_type: CommitConvention = repo_name.split("_")[-2]  # type: ignore[assignment]
    return {
        "definition": build_repo_w_github_flow_w_default_release_channel(
            repo_name=repo_name,
            commit_type=commit_type,
            dest_dir=example_project_dir,
        ),
        "repo": example_project_git_repo(),
    }


@pytest.fixture
def repo_w_github_flow_w_default_release_channel_emoji_commits(
    build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn,
    example_project_git_repo: ExProjectGitRepoFn,
    example_project_dir: ExProjectDir,
    change_to_ex_proj_dir: None,  # requested only for its side effect (changes cwd)
) -> BuiltRepoResult:
    """GitHub Flow repo, single default-branch release channel, gitmoji commits."""
    repo_name = repo_w_github_flow_w_default_release_channel_emoji_commits.__name__
    # Commit convention is encoded as the 2nd-to-last word of the fixture name.
    commit_type: CommitConvention = repo_name.split("_")[-2]  # type: ignore[assignment]
    return {
        "definition": build_repo_w_github_flow_w_default_release_channel(
            repo_name=repo_name,
            commit_type=commit_type,
            dest_dir=example_project_dir,
        ),
        "repo": example_project_git_repo(),
    }


@pytest.fixture
def repo_w_github_flow_w_default_release_channel_scipy_commits(
    build_repo_w_github_flow_w_default_release_channel: BuildSpecificRepoFn,
    example_project_git_repo: ExProjectGitRepoFn,
    example_project_dir: ExProjectDir,
    change_to_ex_proj_dir: None,  # requested only for its side effect (changes cwd)
) -> BuiltRepoResult:
    """GitHub Flow repo, single default-branch release channel, scipy-style commits."""
    repo_name = repo_w_github_flow_w_default_release_channel_scipy_commits.__name__
    # Commit convention is encoded as the 2nd-to-last word of the fixture name.
    commit_type: CommitConvention = repo_name.split("_")[-2]  # type: ignore[assignment]
    return {
"definition": build_repo_w_github_flow_w_default_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/github_flow/repo_w_release_channels.py000066400000000000000000000522471475670435200322660ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ConvertCommitSpecToCommitDefFn, ExProjectGitRepoFn, FormatGitHubMergeCommitMsgFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) FIX_BRANCH_1_NAME = "fix/patch-1" FEAT_BRANCH_1_NAME = "feat/feature-1" FEAT_BRANCH_2_NAME = "feat/feature-2" @pytest.fixture(scope="session") def deps_files_4_github_flow_repo_w_feature_release_channel( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_github_flow_repo_w_feature_release_channel( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, 
deps_files_4_github_flow_repo_w_feature_release_channel: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files( deps_files_4_github_flow_repo_w_feature_release_channel ) @pytest.fixture(scope="session") def get_repo_definition_4_github_flow_repo_w_feature_release_channel( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, convert_commit_spec_to_commit_def: ConvertCommitSpecToCommitDefFn, format_merge_commit_msg_github: FormatGitHubMergeCommitMsgFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ Builds a repository with the GitHub Flow branching strategy using merge commits for alpha feature releases and official releases on the default branch. """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) pr_num_gen = (i for i in count(start=2, step=1)) changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", 
"prerelease": False, }, # branch "feat/" & "fix/" has prerelease suffix of "alpha" "tool.semantic_release.branches.alpha-release": { "match": r"^(feat|fix)/.+", "prerelease": True, "prerelease_token": "alpha", }, "tool.semantic_release.allow_zero_version": False, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "1.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and release it as an alpha release new_version = "1.0.1-alpha.1" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FIX_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": 
changelog_file_definitons, }, }, ], }, }, ] ) # Update the fix and release another alpha release new_version = "1.0.1-alpha.2" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: adjust text to resolve", "emoji": ":bug: adjust text to resolve", "scipy": "MAINT: adjust text to resolve", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge the fix branch into the default branch and formally release it new_version = "1.0.1" fix_branch_pr_number = next(pr_num_gen) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FIX_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_github( pr_number=fix_branch_pr_number, branch_name=FIX_BRANCH_1_NAME, ), "emoji": format_merge_commit_msg_github( pr_number=fix_branch_pr_number, branch_name=FIX_BRANCH_1_NAME, ), "scipy": format_merge_commit_msg_github( pr_number=fix_branch_pr_number, branch_name=FIX_BRANCH_1_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a feature branch and release it as an alpha release new_version = "1.1.0-alpha.1" 
repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": FEAT_BRANCH_1_NAME, "start_branch": DEFAULT_BRANCH_NAME, }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat(cli): add cli interface", "emoji": ":sparkles: add cli interface", "scipy": "ENH: add cli interface", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ) }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Merge the feature branch and officially release it new_version = "1.1.0" feat_branch_pr_number = next(pr_num_gen) repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.GIT_MERGE, "details": { "branch_name": FEAT_BRANCH_1_NAME, "fast_forward": False, "commit_def": convert_commit_spec_to_commit_def( { "conventional": format_merge_commit_msg_github( pr_number=feat_branch_pr_number, branch_name=FEAT_BRANCH_1_NAME, ), "emoji": format_merge_commit_msg_github( pr_number=feat_branch_pr_number, branch_name=FEAT_BRANCH_1_NAME, ), "scipy": format_merge_commit_msg_github( pr_number=feat_branch_pr_number, branch_name=FEAT_BRANCH_1_NAME, ), "datetime": next(commit_timestamp_gen), "include_in_changelog": bool(commit_type == "emoji"), }, commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion 
@pytest.fixture(scope="session") def build_repo_w_github_flow_w_feature_release_channel( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_github_flow_repo_w_feature_release_channel: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_github_flow_repo_w_feature_release_channel: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_github_flow_repo_w_feature_release_channel( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_github_flow_repo_w_feature_release_channel, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_github_flow_w_feature_release_channel_conventional_commits( build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_name, 
commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_feature_release_channel_emoji_commits( build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_feature_release_channel_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_github_flow_w_feature_release_channel_scipy_commits( build_repo_w_github_flow_w_feature_release_channel: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_github_flow_w_feature_release_channel_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_repo_w_github_flow_w_feature_release_channel( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/repo_initial_commit.py000066400000000000000000000157361475670435200271370ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from 
tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_initial_commit( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_initial_commit( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_initial_commit: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_initial_commit) @pytest.fixture(scope="session") def get_repo_definition_4_repo_w_initial_commit( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: repo_construction_steps: list[RepoActions] = [] repo_construction_steps.extend( [ { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": 
r"^(main|master)$", "prerelease": False, }, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": stable_now_date().isoformat( timespec="seconds" ), "include_in_changelog": bool( commit_type == "emoji" ), }, ], commit_type, ), }, }, { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": "Unreleased", "dest_files": [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_repo_w_initial_commit( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_repo_w_initial_commit: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_initial_commit: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_repo_w_initial_commit( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_initial_commit, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up 
test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_initial_commit( build_repo_w_initial_commit: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_initial_commit.__name__ return { "definition": build_repo_w_initial_commit( repo_name=repo_name, commit_type="conventional", # not used but required dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/trunk_based_dev/000077500000000000000000000000001475670435200256625ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/fixtures/repos/trunk_based_dev/__init__.py000066400000000000000000000005631475670435200277770ustar00rootroot00000000000000from tests.fixtures.repos.trunk_based_dev.repo_w_dual_version_support import * from tests.fixtures.repos.trunk_based_dev.repo_w_dual_version_support_w_prereleases import * from tests.fixtures.repos.trunk_based_dev.repo_w_no_tags import * from tests.fixtures.repos.trunk_based_dev.repo_w_prereleases import * from tests.fixtures.repos.trunk_based_dev.repo_w_tags import * python-semantic-release-9.21.0/tests/fixtures/repos/trunk_based_dev/repo_w_dual_version_support.py000066400000000000000000000421661475670435200341060ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( 
ExProjectDir, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) MAINTENANCE_BRANCH_NAME = "v1.x" @pytest.fixture(scope="session") def deps_files_4_repo_w_dual_version_support( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_dual_version_support( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_dual_version_support: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_dual_version_support) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_dual_version_support( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with only official releases with latest and previous version support. 
""" def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.latest": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.branches.maintenance": { "match": r"^v1\.x$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": False, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "1.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": 
new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and officially release it new_version = "1.0.1" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a breaking change and officially release it new_version = "2.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": MAINTENANCE_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { 
"new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Fix a critical bug in the previous version and officially release it new_version = "1.0.2" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": MAINTENANCE_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct critical bug", "emoji": ":bug: correct critical bug", "scipy": "MAINT: correct critical bug", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Return to the latest release variant repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, # TODO: return and make another release on the latest version # currently test variant of the changelog generator can't support this ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_trunk_only_repo_w_dual_version_support( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_dual_version_support: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_dual_version_support: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_trunk_only_repo_w_dual_version_support( commit_type=commit_type, ) ) 
return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_dual_version_support, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_dual_version_spt_conventional_commits( build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_support( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_emoji_commits( build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_support( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_scipy_commits( 
build_trunk_only_repo_w_dual_version_support: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_support( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } repo_w_dual_version_support_w_prereleases.py000066400000000000000000000517301475670435200367440ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/fixtures/repos/trunk_based_devfrom __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( DEFAULT_BRANCH_NAME, EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) MAINTENANCE_BRANCH_NAME = "v1.x" @pytest.fixture(scope="session") def deps_files_4_repo_w_dual_version_spt_w_prereleases( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because 
of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_dual_version_spt_w_prereleases( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_dual_version_spt_w_prereleases: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_dual_version_spt_w_prereleases) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_dual_version_spt_w_prereleases( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with only official releases with latest and previous version support. """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.latest": { "match": 
r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.branches.maintenance": { "match": r"^v1\.x$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": False, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "1.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and officially release it new_version = "1.0.1" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a breaking change and officially release it new_version = "2.0.0" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": { "create_branch": { "name": 
MAINTENANCE_BRANCH_NAME, "start_branch": DEFAULT_BRANCH_NAME, } }, }, { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": str.join( "\n\n", [ "feat: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "emoji": str.join( "\n\n", [ ":boom: add revolutionary feature", "This change is a breaking change", ], ), "scipy": str.join( "\n\n", [ "API: add revolutionary feature", "BREAKING CHANGE: this is a breaking change", ], ), "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Attempt to fix a critical bug in the previous version and release it as a prerelease version # This is based on https://github.com/python-semantic-release/python-semantic-release/issues/861 new_version = "1.0.2-hotfix.1" repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": MAINTENANCE_BRANCH_NAME}, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct critical bug", "emoji": ":bug: correct critical bug", "scipy": "MAINT: correct critical bug", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # The Hotfix didn't work, so 
correct it and try again new_version = "1.0.2-hotfix.2" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: resolve critical bug", "emoji": ":bug: resolve critical bug", "scipy": "MAINT: resolve critical bug", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # It finally was resolved so release it officially new_version = "1.0.2" repo_construction_steps.extend( [ # { # "action": RepoActionStep.MAKE_COMMITS, # "details": { # "commits": convert_commit_specs_to_commit_defs( # [ # { # "conventional": "docs: update documentation regarding critical bug", # "emoji": ":books: update documentation regarding critical bug", # "scipy": "DOC: update documentation regarding critical bug", # "datetime": next(commit_timestamp_gen), # "include_in_changelog": True, # }, # ], # commit_type, # ), # }, # }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "max_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Return to the latest release variant repo_construction_steps.extend( [ { "action": RepoActionStep.GIT_CHECKOUT, "details": {"branch": DEFAULT_BRANCH_NAME}, }, # TODO: return and make another release on the latest version # currently test variant of the changelog generator can't support this ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def 
build_trunk_only_repo_w_dual_version_spt_w_prereleases( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_dual_version_spt_w_prereleases: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_dual_version_spt_w_prereleases: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_trunk_only_repo_w_dual_version_spt_w_prereleases( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_dual_version_spt_w_prereleases, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits( build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = ( repo_w_trunk_only_dual_version_spt_w_prereleases_conventional_commits.__name__ ) commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_name, 
commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits( build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_w_prereleases_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits( build_trunk_only_repo_w_dual_version_spt_w_prereleases: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_dual_version_spt_w_prereleases_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_dual_version_spt_w_prereleases( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/trunk_based_dev/repo_w_no_tags.py000066400000000000000000000267411475670435200312530ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence 
from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_w_no_tags( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_no_tags( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_no_tags: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_no_tags) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_no_tags( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy without any releases. 
""" def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) repo_construction_steps: list[RepoActions] = [] repo_construction_steps.extend( [ { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, **(extra_configs or {}), }, }, }, { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, { "conventional": "fix: correct more text", "emoji": ":bug: correct more text", "scipy": "MAINT: correct more text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": "Unreleased", "dest_files": [ { "path": changelog_md_file, "format": 
ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_trunk_only_repo_w_no_tags( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_no_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_no_tags: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_no_tags( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_no_tags, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_no_tags_conventional_commits_using_tag_format( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_no_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_no_tags: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: """ Replicates repo with 
no tags, but with a tag format X{version} Follows tag format defined in python-semantic-release#1137 """ repo_name = repo_w_no_tags_conventional_commits_using_tag_format.__name__ commit_type: CommitConvention = ( repo_name.split("_commits", maxsplit=1)[0].split("_")[-1] # type: ignore[assignment] ) def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_no_tags( commit_type=commit_type, tag_format_str="X{version}", ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_no_tags, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_conventional_commits( build_trunk_only_repo_w_no_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_no_tags_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_no_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_emoji_commits( build_trunk_only_repo_w_no_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_no_tags_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_no_tags( repo_name=repo_name, commit_type=commit_type, 
dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_no_tags_scipy_commits( build_trunk_only_repo_w_no_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_no_tags_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_no_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/trunk_based_dev/repo_w_prereleases.py000066400000000000000000000352401475670435200321250ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ExProjectDir from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_w_prereleases( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures 
Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_prereleases( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_prereleases: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return get_md5_for_set_of_files(deps_files_4_repo_w_prereleases) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_prerelease_tags( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with official and prereleases releases. """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": True, **(extra_configs or {}), }, }, } ) # Make 
initial release new_version = "0.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), "include_in_changelog": bool( commit_type == "emoji" ), }, { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and release it as a release candidate new_version = "0.1.1-rc.1" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make an additional feature change and release it as a new release candidate new_version = "0.2.0-rc.1" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat: add some more text", "emoji": ":sparkles: add some more text", "scipy": "ENH: add some more text", "datetime": next(commit_timestamp_gen), 
"include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make an additional feature change and officially release the latest new_version = "0.2.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "feat(cli): add cli command", "emoji": ":sparkles:(cli) add cli command", "scipy": "ENH(cli): add cli command", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_trunk_only_repo_w_prerelease_tags( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_prerelease_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_prereleases: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = ( get_repo_definition_4_trunk_only_repo_w_prerelease_tags( commit_type=commit_type, ) ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_prereleases, 
build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_n_prereleases_conventional_commits( build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_n_prereleases_conventional_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_prerelease_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_n_prereleases_emoji_commits( build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_n_prereleases_emoji_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_prerelease_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } @pytest.fixture def repo_w_trunk_only_n_prereleases_scipy_commits( build_trunk_only_repo_w_prerelease_tags: BuildSpecificRepoFn, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = 
repo_w_trunk_only_n_prereleases_scipy_commits.__name__ commit_type: CommitConvention = repo_name.split("_")[-2] # type: ignore[assignment] return { "definition": build_trunk_only_repo_w_prerelease_tags( repo_name=repo_name, commit_type=commit_type, dest_dir=example_project_dir, ), "repo": example_project_git_repo(), } python-semantic-release-9.21.0/tests/fixtures/repos/trunk_based_dev/repo_w_tags.py000066400000000000000000000311241475670435200305460ustar00rootroot00000000000000from __future__ import annotations from datetime import timedelta from itertools import count from pathlib import Path from typing import TYPE_CHECKING import pytest from semantic_release.cli.config import ChangelogOutputFormat import tests.conftest import tests.const import tests.util from tests.const import ( EXAMPLE_HVCS_DOMAIN, INITIAL_COMMIT_MESSAGE, RepoActionStep, ) if TYPE_CHECKING: from typing import Sequence from tests.conftest import ( GetCachedRepoDataFn, GetMd5ForSetOfFilesFn, GetStableDateNowFn, ) from tests.fixtures.example_project import ( ExProjectDir, ) from tests.fixtures.git_repo import ( BuildRepoFromDefinitionFn, BuildRepoOrCopyCacheFn, BuildSpecificRepoFn, BuiltRepoResult, CommitConvention, ConvertCommitSpecsToCommitDefsFn, ExProjectGitRepoFn, GetRepoDefinitionFn, RepoActions, RepoActionWriteChangelogsDestFile, TomlSerializableTypes, ) @pytest.fixture(scope="session") def deps_files_4_repo_w_tags( deps_files_4_example_git_project: list[Path], ) -> list[Path]: return [ *deps_files_4_example_git_project, # This file Path(__file__).absolute(), # because of imports Path(tests.const.__file__).absolute(), Path(tests.util.__file__).absolute(), # because of the fixtures Path(tests.conftest.__file__).absolute(), ] @pytest.fixture(scope="session") def build_spec_hash_4_repo_w_tags( get_md5_for_set_of_files: GetMd5ForSetOfFilesFn, deps_files_4_repo_w_tags: list[Path], ) -> str: # Generates a hash of the build spec to set when to invalidate the cache return 
get_md5_for_set_of_files(deps_files_4_repo_w_tags) @pytest.fixture(scope="session") def get_repo_definition_4_trunk_only_repo_w_tags( convert_commit_specs_to_commit_defs: ConvertCommitSpecsToCommitDefsFn, changelog_md_file: Path, changelog_rst_file: Path, stable_now_date: GetStableDateNowFn, ) -> GetRepoDefinitionFn: """ Builds a repository with trunk-only committing (no-branching) strategy with only official releases. """ def _get_repo_from_defintion( commit_type: CommitConvention, hvcs_client_name: str = "github", hvcs_domain: str = EXAMPLE_HVCS_DOMAIN, tag_format_str: str | None = None, extra_configs: dict[str, TomlSerializableTypes] | None = None, mask_initial_release: bool = False, ) -> Sequence[RepoActions]: stable_now_datetime = stable_now_date() commit_timestamp_gen = ( (stable_now_datetime + timedelta(seconds=i)).isoformat(timespec="seconds") for i in count(step=1) ) changelog_file_definitons: Sequence[RepoActionWriteChangelogsDestFile] = [ { "path": changelog_md_file, "format": ChangelogOutputFormat.MARKDOWN, }, { "path": changelog_rst_file, "format": ChangelogOutputFormat.RESTRUCTURED_TEXT, }, ] repo_construction_steps: list[RepoActions] = [] repo_construction_steps.append( { "action": RepoActionStep.CONFIGURE, "details": { "commit_type": commit_type, "hvcs_client_name": hvcs_client_name, "hvcs_domain": hvcs_domain, "tag_format_str": tag_format_str, "mask_initial_release": mask_initial_release, "extra_configs": { # Set the default release branch "tool.semantic_release.branches.main": { "match": r"^(main|master)$", "prerelease": False, }, "tool.semantic_release.allow_zero_version": True, **(extra_configs or {}), }, }, } ) # Make initial release new_version = "0.1.0" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": INITIAL_COMMIT_MESSAGE, "emoji": INITIAL_COMMIT_MESSAGE, "scipy": INITIAL_COMMIT_MESSAGE, "datetime": next(commit_timestamp_gen), 
"include_in_changelog": bool( commit_type == "emoji" ), }, { "conventional": "feat: add new feature", "emoji": ":sparkles: add new feature", "scipy": "ENH: add new feature", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) # Make a fix and officially release it new_version = "0.1.1" repo_construction_steps.extend( [ { "action": RepoActionStep.MAKE_COMMITS, "details": { "commits": convert_commit_specs_to_commit_defs( [ { "conventional": "fix: correct some text", "emoji": ":bug: correct some text", "scipy": "MAINT: correct some text", "datetime": next(commit_timestamp_gen), "include_in_changelog": True, }, ], commit_type, ), }, }, { "action": RepoActionStep.RELEASE, "details": { "version": new_version, "datetime": next(commit_timestamp_gen), "pre_actions": [ { "action": RepoActionStep.WRITE_CHANGELOGS, "details": { "new_version": new_version, "dest_files": changelog_file_definitons, }, }, ], }, }, ] ) return repo_construction_steps return _get_repo_from_defintion @pytest.fixture(scope="session") def build_trunk_only_repo_w_tags( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_tags: str, ) -> BuildSpecificRepoFn: def _build_specific_repo_type( repo_name: str, commit_type: CommitConvention, dest_dir: Path ) -> Sequence[RepoActions]: def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_tags( commit_type=commit_type, ) return build_repo_from_definition(cached_repo_path, 
repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_tags, build_repo_func=_build_repo, dest_dir=dest_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return cached_repo_data["build_definition"] return _build_specific_repo_type # --------------------------------------------------------------------------- # # Test-level fixtures that will cache the built directory & set up test case # # --------------------------------------------------------------------------- # @pytest.fixture def repo_w_trunk_only_conventional_commits_using_tag_format( build_repo_from_definition: BuildRepoFromDefinitionFn, get_repo_definition_4_trunk_only_repo_w_tags: GetRepoDefinitionFn, get_cached_repo_data: GetCachedRepoDataFn, build_repo_or_copy_cache: BuildRepoOrCopyCacheFn, build_spec_hash_4_repo_w_tags: str, example_project_git_repo: ExProjectGitRepoFn, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ) -> BuiltRepoResult: repo_name = repo_w_trunk_only_conventional_commits_using_tag_format.__name__ commit_type: CommitConvention = ( repo_name.split("_commits", maxsplit=1)[0].split("_")[-1] # type: ignore[assignment] ) def _build_repo(cached_repo_path: Path) -> Sequence[RepoActions]: repo_construction_steps = get_repo_definition_4_trunk_only_repo_w_tags( commit_type=commit_type, tag_format_str="submod-v{version}", ) return build_repo_from_definition(cached_repo_path, repo_construction_steps) build_repo_or_copy_cache( repo_name=repo_name, build_spec_hash=build_spec_hash_4_repo_w_tags, build_repo_func=_build_repo, dest_dir=example_project_dir, ) if not (cached_repo_data := get_cached_repo_data(proj_dirname=repo_name)): raise ValueError("Failed to retrieve repo data from cache") return { "definition": cached_repo_data["build_definition"], "repo": example_project_git_repo(), } @pytest.fixture def 
@pytest.fixture
def repo_w_trunk_only_emoji_commits(
    build_trunk_only_repo_w_tags: BuildSpecificRepoFn,
    example_project_git_repo: ExProjectGitRepoFn,
    example_project_dir: ExProjectDir,
    change_to_ex_proj_dir: None,
) -> BuiltRepoResult:
    """Trunk-only repo built from gitmoji-style commits."""
    repo_name = repo_w_trunk_only_emoji_commits.__name__
    # Second-to-last name segment identifies the commit convention ("emoji").
    commit_type: CommitConvention = repo_name.rsplit("_", 2)[-2]  # type: ignore[assignment]

    definition = build_trunk_only_repo_w_tags(
        repo_name=repo_name,
        commit_type=commit_type,
        dest_dir=example_project_dir,
    )

    return {"definition": definition, "repo": example_project_git_repo()}
@pytest.fixture(scope="session")
def format_scipy_commit():
    """Factory fixture returning a formatter for scipy-convention commit messages."""

    def _format_scipy_commit(
        scipy_tag: str, subject: str, body_parts: list[str]
    ) -> str:
        # Header line ("TAG: subject"), blank line, then each body block
        # separated by a blank line.
        joined_body = "\n\n".join(body_parts)
        return f"{scipy_tag}: {subject}\n\n{joined_body}"

    return _format_scipy_commit
@pytest.fixture(scope="session")
def scipy_minor_subjects(scipy_minor_commit_types: list[str]) -> list[str]:
    """One example commit subject per minor-bump scipy tag, ordered to match the tags."""
    subject_by_tag = {
        "DEP": "stats: switch kendalltau to kwarg-only, remove initial_lexsort",
        "DEV": "add unicode check to pre-commit-hook",
        "ENH": "stats.ttest_1samp: add array-API support (#20545)",
        "REV": "reverted a previous commit",
        "FEAT": "added a new feature",
    }

    # Test fixture modification failure prevention: every minor tag must have
    # a subject, and no stale subjects may linger here.
    assert len(subject_by_tag) == len(scipy_minor_commit_types)

    return [subject_by_tag[tag] for tag in scipy_minor_commit_types]
@pytest.fixture(scope="session")
def scipy_brk_change_commit_bodies() -> list[list[str]]:
    """Commit-body variants that each contain a BREAKING CHANGE paragraph.

    Each element is a list of body "blocks" (paragraphs separated by blank
    lines when formatted by ``format_scipy_commit``).
    """
    # NOTE(review): the exact line-wrap of this literal was lost in transit;
    # the wording is preserved verbatim — confirm wrapping against upstream.
    brk_chg_msg = dedent(
        """
        BREAKING CHANGE: a description of what
        is now different with multiple lines
        """
    ).strip()

    # NOTE(review): "incompatiblity" typo is intentional test data — do not fix.
    one_line_desc = "resolves bug related to windows incompatiblity"

    return [
        # No regular change description
        [brk_chg_msg],
        # regular change description & breaking change message
        [one_line_desc, brk_chg_msg],
        # regular change description & breaking change message with footer
        [one_line_desc, brk_chg_msg, "Resolves: #666"],
    ]
@pytest.fixture(scope="session")
def scipy_chore_commit_parts(
    scipy_chore_commit_types: list[str],
    scipy_chore_subjects: list[str],
    scipy_nonbrking_commit_bodies: list[list[str]],
) -> list[tuple[str, str, list[str]]]:
    """Pair every chore tag/subject with every non-breaking body variant."""
    # Test fixture modification failure prevention
    assert len(scipy_chore_commit_types) == len(scipy_chore_subjects)

    # Cartesian pairing: for each (tag, subject) emit one entry per body variant.
    parts: list[tuple[str, str, list[str]]] = []
    for tag, subject in zip(scipy_chore_commit_types, scipy_chore_subjects):
        for body_blocks in scipy_nonbrking_commit_bodies:
            parts.append((tag, subject, body_blocks))
    return parts
@pytest.fixture(scope="session")
def scipy_patch_commits(
    scipy_patch_commit_parts: list[tuple[str, str, list[str]]],
    format_scipy_commit: FormatScipyCommitFn,
) -> list[str]:
    """Render each (tag, subject, body) patch triple into a full commit message."""
    # Each part is exactly (commit_type, subject, body_blocks), so it can be
    # splatted straight into the formatter.
    return [format_scipy_commit(*part) for part in scipy_patch_commit_parts]
@pytest.fixture(scope="session")
def scipy_patch_mixed_commits(
    scipy_patch_commits: list[str],
    scipy_chore_commits: list[str],
) -> list[str]:
    """Interleave patch and chore commit messages, dropping padding and empties."""
    # zip_longest pads the shorter list with None; the truthiness test below
    # discards that padding (and any empty messages), matching filter(None, ...).
    interleaved = chain.from_iterable(
        zip_longest(scipy_patch_commits, scipy_chore_commits)
    )
    return [message for message in interleaved if message]
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(items: list[pytest.Item]) -> None:
    """Apply the unit marker to all tests in the unit test directory.

    Also orders unit tests first unless a test already carries an explicit
    ``order`` marker of its own.
    """
    unit_test_directory = Path(__file__).parent
    for item in items:
        # Guard clause: only touch tests collected from within this directory tree.
        if unit_test_directory not in item.path.parents:
            continue
        item.add_marker(pytest.mark.unit)
        # any() short-circuits and avoids building a throwaway list of marker
        # names per item (the original used `in [mark.name for mark in ...]`).
        if not any(mark.name == "order" for mark in item.own_markers):
            item.add_marker(pytest.mark.order("first"))
@pytest.fixture
def artificial_release_history(
    commit_author: Actor,
    stable_now_date: GetStableDateNowFn,
) -> ReleaseHistory:
    """Hand-built two-release history plus one unreleased feature commit.

    Releases 1.0.0 (one feature) and 1.1.0 (three fixes + one feature), both
    tagged by ``commit_author`` at a deterministic timestamp.  The fix commits
    are deliberately mis-ordered to exercise template-side sorting.
    """
    current_datetime = stable_now_date()
    first_version = Version.parse("1.0.0")
    second_version = first_version.bump(LevelBump.MINOR)
    fix_commit_subject = "fix a problem"
    fix_commit_type = "fix"
    fix_commit_scope = "cli"

    # Commits are built against a null SHA — only message/metadata matter here.
    fix_commit = Commit(
        Repo("."),
        Object.NULL_HEX_SHA[:20].encode("utf-8"),
        message=f"{fix_commit_type}({fix_commit_scope}): {fix_commit_subject}",
    )

    fix_commit_parsed = ParsedCommit(
        bump=LevelBump.PATCH,
        type="fix",
        scope=fix_commit_scope,
        descriptions=[fix_commit_subject],
        breaking_descriptions=[],
        commit=fix_commit,
    )

    fix_commit_2_subject = "alphabetically first to solve a non-scoped problem"
    fix_commit_2_type = "fix"
    fix_commit_2_scope = ""

    fix_commit_2 = Commit(
        Repo("."),
        Object.NULL_HEX_SHA[:20].encode("utf-8"),
        message=f"{fix_commit_2_type}: {fix_commit_2_subject}",
    )

    fix_commit_2_parsed = ParsedCommit(
        bump=LevelBump.PATCH,
        type="fix",
        scope=fix_commit_2_scope,
        descriptions=[fix_commit_2_subject],
        breaking_descriptions=[],
        commit=fix_commit_2,
    )

    fix_commit_3_subject = "alphabetically first to solve a scoped problem"
    fix_commit_3_type = "fix"
    fix_commit_3_scope = "cli"

    fix_commit_3 = Commit(
        Repo("."),
        Object.NULL_HEX_SHA[:20].encode("utf-8"),
        message=f"{fix_commit_3_type}({fix_commit_3_scope}): {fix_commit_3_subject}",
    )

    fix_commit_3_parsed = ParsedCommit(
        bump=LevelBump.PATCH,
        type="fix",
        scope=fix_commit_3_scope,
        descriptions=[fix_commit_3_subject],
        breaking_descriptions=[],
        commit=fix_commit_3,
    )

    feat_commit_subject = "add a new feature"
    feat_commit_type = "feat"
    feat_commit_scope = "cli"

    feat_commit = Commit(
        Repo("."),
        Object.NULL_HEX_SHA[:20].encode("utf-8"),
        message=f"{feat_commit_type}({feat_commit_scope}): {feat_commit_subject}",
    )

    # NOTE: parsed type is the display group "feature", not the raw tag "feat".
    feat_commit_parsed = ParsedCommit(
        bump=LevelBump.MINOR,
        type="feature",
        scope=feat_commit_scope,
        descriptions=[feat_commit_subject],
        breaking_descriptions=[],
        commit=feat_commit,
    )

    return ReleaseHistory(
        unreleased={"feature": [feat_commit_parsed]},
        released={
            second_version: Release(
                tagger=commit_author,
                committer=commit_author,
                tagged_date=current_datetime,
                elements={
                    # Purposefully inserted out of order, should be dictsorted in templates
                    "fix": [
                        # Purposefully inserted out of alphabetical order, should be sorted in templates
                        fix_commit_parsed,
                        fix_commit_2_parsed,  # has no scope
                        fix_commit_3_parsed,  # has same scope as 1
                    ],
                    "feature": [feat_commit_parsed],
                },
                version=second_version,
            ),
            first_version: Release(
                tagger=commit_author,
                committer=commit_author,
                # One minute earlier so releases are strictly ordered in time.
                tagged_date=current_datetime - timedelta(minutes=1),
                elements={"feature": [feat_commit_parsed]},
                version=first_version,
            ),
        },
    )
@pytest.fixture
def single_release_history(
    artificial_release_history: ReleaseHistory,
) -> ReleaseHistory:
    """Collapse the artificial history down to only its oldest (last-listed) release."""
    # Iterating the released mapping yields versions in insertion order;
    # the final key is the oldest release in these fixtures.
    *_, oldest_version = artificial_release_history.released
    return ReleaseHistory(
        unreleased={},
        released={oldest_version: artificial_release_history.released[oldest_version]},
    )
@pytest.fixture
def release_history_w_a_notice(
    artificial_release_history: ReleaseHistory,
    stable_now_date: GetStableDateNowFn,
) -> ReleaseHistory:
    """Extend the artificial history with a patch release carrying a NOTICE commit."""
    current_datetime = stable_now_date()
    # released is insertion-ordered newest-first; grab the latest version.
    latest_version = next(iter(artificial_release_history.released.keys()))
    next_version = latest_version.bump(LevelBump.PATCH)

    notice_commit_subject = "deprecate a type"
    notice_commit_type = "refactor"
    notice_commit_scope = "cli"
    # NOTE(review): "multline" typo and the literal's exact line-wrap are
    # original test data (wrap reconstructed) — confirm against upstream.
    release_notice = dedent(
        """\
        This is a multline release notice
        that is made up of two lines.
        """
    )

    notice_commit = Commit(
        Repo("."),
        Object.NULL_BIN_SHA,
        message=str.join(
            "\n\n",
            [
                f"{notice_commit_type}({notice_commit_scope}): {notice_commit_subject}",
                f"NOTICE: {release_notice}",
            ],
        ),
    )

    notice_commit_parsed = ParsedCommit(
        bump=LevelBump.NO_RELEASE,
        type=notice_commit_type,
        scope=notice_commit_scope,
        descriptions=[notice_commit_subject],
        breaking_descriptions=[],
        # Notices are normalized to a single line for rendering.
        release_notices=(release_notice.replace("\n", " ").strip(),),
        commit=notice_commit,
    )

    return ReleaseHistory(
        unreleased={},
        released={
            # New patch release is prepended, reusing the latest release's actors.
            next_version: Release(
                tagger=artificial_release_history.released[latest_version]["tagger"],
                committer=artificial_release_history.released[latest_version][
                    "committer"
                ],
                tagged_date=current_datetime,
                elements={"Refactoring": [notice_commit_parsed]},
                version=next_version,
            ),
            **artificial_release_history.released,
        },
    )
@pytest.fixture
def release_history_w_multiple_notices(
    release_history_w_a_notice: ReleaseHistory,
    stable_now_date: GetStableDateNowFn,
) -> ReleaseHistory:
    """History whose latest release contains two NOTICE commits of different types."""
    current_datetime = stable_now_date()
    latest_version = next(iter(release_history_w_a_notice.released.keys()))

    notice_commit_subject = "add a configurable feature"
    notice_commit_type = "feat"
    notice_commit_scope = "cli-config"
    # NOTE(review): "multline" typo and the literal's exact line-wrap are
    # original test data (wrap reconstructed) — confirm against upstream.
    release_notice = dedent(
        """\
        This is a multline release notice that is its own paragraph
        to detail the configurable feature.
        """
    )

    notice_commit = Commit(
        Repo("."),
        Object.NULL_BIN_SHA,
        message=str.join(
            "\n\n",
            [
                f"{notice_commit_type}({notice_commit_scope}): {notice_commit_subject}",
                f"NOTICE: {release_notice}",
            ],
        ),
    )

    notice_commit_parsed = ParsedCommit(
        bump=LevelBump.MINOR,
        type=notice_commit_type,
        scope=notice_commit_scope,
        descriptions=[notice_commit_subject],
        breaking_descriptions=[],
        # Notices are normalized to a single line for rendering.
        release_notices=(release_notice.replace("\n", " ").strip(),),
        commit=notice_commit,
    )

    return ReleaseHistory(
        unreleased={},
        released={
            **release_history_w_a_notice.released,
            # Replaces the latest release, prepending a "Features" element that
            # carries a second notice commit alongside the existing notice.
            latest_version: Release(
                tagger=release_history_w_a_notice.released[latest_version]["tagger"],
                committer=release_history_w_a_notice.released[latest_version][
                    "committer"
                ],
                tagged_date=current_datetime,
                elements={
                    "Features": [notice_commit_parsed],
                    **release_history_w_a_notice.released[latest_version]["elements"],
                },
                version=latest_version,
            ),
        },
    )
@pytest.fixture
def changelog_tpl_github_context() -> str:
    """Returns a changelog template which uses all the GitHub configured filters."""
    # NOTE(review): blank-line placement inside this raw template was
    # reconstructed to match the expected output asserted in
    # test_changelog_context_github — confirm against upstream.
    return dedent(
        r"""
        # Changelog

        > Repository: {{ "[%s](%s)" | format(context.hvcs_type | capitalize, "/" | create_repo_url) }}

        ## v2.0.0

        {{ "[Change Summary](%s)" | format("v1.0.0" | compare_url("v2.0.0")) }}

        ### Bug Fixes

        - Fixed a minor bug {{ "([#%s](%s))" | format(22, 22 | pull_request_url) }}
        - **cli:** fix a problem {{ "([%s](%s))" | format("000000", "000000" | commit_hash_url) }}

        ### Resolved Issues

        - {{ "[#%s](%s)" | format(19, 19 | issue_url) }}
        """
    ).lstrip()
@pytest.fixture
def changelog_tpl_bitbucket_context() -> str:
    """Returns a changelog template which uses all the BitBucket configured filters."""
    # No "Resolved Issues" section: BitBucket's filter set has no issue_url.
    # NOTE(review): blank-line placement inside this raw template was
    # reconstructed to match the expected output asserted in
    # test_changelog_context_bitbucket — confirm against upstream.
    return dedent(
        r"""
        # Changelog

        > Repository: {{ "[%s](%s)" | format(context.hvcs_type | capitalize, "/" | create_repo_url) }}

        ## v2.0.0

        {{ "[Change Summary](%s)" | format("v1.0.0" | compare_url("v2.0.0")) }}

        ### Bug Fixes

        - Fixed a minor bug {{ "([#%s](%s))" | format(22, 22 | pull_request_url) }}
        - **cli:** fix a problem {{ "([%s](%s))" | format("000000", "000000" | commit_hash_url) }}
        """
    ).lstrip()
def test_changelog_context_bitbucket(
    changelog_tpl_bitbucket_context: str,
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """Render the BitBucket template and verify every filter produces its URL."""
    hvcs = Bitbucket(example_git_https_url)

    # Expected text mirrors the template; URLs come straight from the client.
    expected_changelog = str.join(
        "\n",
        [
            "# Changelog",
            "",
            f'> Repository: [{hvcs.__class__.__name__.capitalize()}]({hvcs.create_repo_url("")})',
            "",
            "## v2.0.0",
            "",
            f'[Change Summary]({hvcs.compare_url("v1.0.0", "v2.0.0")})',
            "",
            "### Bug Fixes",
            "",
            f"- Fixed a minor bug ([#22]({hvcs.pull_request_url(22)}))",
            f'- **cli:** fix a problem ([000000]({hvcs.commit_hash_url("000000")}))',
            "",
        ],
    )

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=hvcs,
        release_history=artificial_release_history,
        mode=ChangelogMode.INIT,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl_bitbucket_context).render()

    # Evaluate
    assert expected_changelog == actual_changelog
def test_changelog_context_gitea(
    changelog_tpl_gitea_context: str,
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """Render the Gitea template and verify every filter produces its URL."""
    hvcs = Gitea(example_git_https_url)

    # No "[Change Summary]" line: the Gitea filter set has no compare_url.
    expected_changelog = str.join(
        "\n",
        [
            "# Changelog",
            "",
            f'> Repository: [{hvcs.__class__.__name__.capitalize()}]({hvcs.create_repo_url("")})',
            "",
            "## v2.0.0",
            "",
            "### Bug Fixes",
            "",
            f"- Fixed a minor bug ([#22]({hvcs.pull_request_url(22)}))",
            f'- **cli:** fix a problem ([000000]({hvcs.commit_hash_url("000000")}))',
            "",
            "### Resolved Issues",
            "",
            f"- [#19]({hvcs.issue_url(19)})",
            "",
        ],
    )

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=hvcs,
        release_history=artificial_release_history,
        mode=ChangelogMode.INIT,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl_gitea_context).render()

    # Evaluate
    assert expected_changelog == actual_changelog
Gitlab(example_git_https_url) expected_changelog = str.join( "\n", [ "# Changelog", "", f'> Repository: [{hvcs.__class__.__name__.capitalize()}]({hvcs.create_repo_url("")})', "", "## v2.0.0", "", f'[Change Summary]({hvcs.compare_url("v1.0.0", "v2.0.0")})', "", "### Bug Fixes", "", f"- Fixed a minor bug ([#22]({hvcs.merge_request_url(22)}))", f"- Fixed a performance bug ([#25]({hvcs.pull_request_url(25)}))", f'- **cli:** fix a problem ([000000]({hvcs.commit_hash_url("000000")}))', "", "### Resolved Issues", "", f"- [#19]({hvcs.issue_url(19)})", "", ], ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl_gitlab_context).render() # Evaluate assert expected_changelog == actual_changelog def test_changelog_context_read_file( example_git_https_url: str, build_configured_base_repo: BuildRepoFn, artificial_release_history: ReleaseHistory, changelog_md_file: Path, change_to_ex_proj_dir: Path, example_project_dir: Path, ): build_configured_base_repo(example_project_dir) # normalize expected to os specific newlines expected_changelog = str.join( os.linesep, [ *[ line.replace("\r", "") for line in EXAMPLE_CHANGELOG_MD_CONTENT.strip().split("\n") ], "", ], ) changelog_tpl = """{{ "%s" | read_file | trim }}%ls""".replace( "%s", str(changelog_md_file) ).replace("%ls", os.linesep) env = environment( newline_sequence="\n" if os.linesep == "\n" else "\r\n", trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True, autoescape=False, ) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, 
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl).render()

    # Evaluate (byte-wise, so newline differences are caught)
    assert expected_changelog.encode() == actual_changelog.encode()


@pytest.mark.parametrize("file_path", ["", "nonexistent.md"])
def test_changelog_context_read_file_fails_gracefully(
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
    file_path: str,
):
    """``read_file`` on an empty or non-existent path renders to an empty
    string rather than raising."""
    changelog_tpl = """{{ "%s" | read_file }}""".replace("%s", file_path)
    expected_changelog = ""

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=Gitlab(example_git_https_url),
        release_history=artificial_release_history,
        mode=ChangelogMode.UPDATE,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl).render()

    # Evaluate
    assert expected_changelog == actual_changelog


def test_changelog_context_autofit_text_width(
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """``autofit_text_width(20)`` wraps the text at a 20-character width."""
    changelog_tpl = """{{ "This is a long line that should be autofitted" | autofit_text_width(20) }}"""
    expected_changelog = "This is a long line\nthat should be\nautofitted"

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=Gitlab(example_git_https_url),
        release_history=artificial_release_history,
        mode=ChangelogMode.UPDATE,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl).render()

    # Evaluate
    assert expected_changelog == actual_changelog


def test_changelog_context_autofit_text_width_w_indent(
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """``autofit_text_width`` with ``indent_size=2`` indents continuation lines."""
    changelog_tpl = """{{ "This is a long line that should be autofitted" | autofit_text_width(20, indent_size=2) }}"""
    expected_changelog = "This is a long line\n  that should be\n  autofitted"

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=Gitlab(example_git_https_url),
        release_history=artificial_release_history,
        mode=ChangelogMode.UPDATE,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl).render()

    # Evaluate
    assert expected_changelog == actual_changelog


def test_changelog_context_sort_numerically(
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """``sort_numerically`` orders link references by their numeric suffix
    (``#5`` before ``#100``), grouping by the non-numeric prefix."""
    changelog_tpl = dedent(
        """\
        {{ [
            ".. _#5: link",
            ".. _PR#3: link",
            ".. _PR#10: link",
            ".. _#100: link"
        ] | sort_numerically | join("\\n")
        }}
        """
    )
    expected_changelog = dedent(
        """\
        .. _#5: link
        .. _#100: link
        .. _PR#3: link
        .. _PR#10: link
        """
    )

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=Gitlab(example_git_https_url),
        release_history=artificial_release_history,
        mode=ChangelogMode.UPDATE,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl).render()

    # Evaluate
    assert expected_changelog == actual_changelog


def test_changelog_context_sort_numerically_reverse(
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """``sort_numerically(reverse=True)`` yields descending numeric order
    within each prefix group."""
    changelog_tpl = dedent(
        """\
        {{ [
            ".. _#5: link",
            ".. _PR#3: link",
            ".. _PR#10: link",
            ".. _#100: link"
        ] | sort_numerically(reverse=True) | join("\\n")
        }}
        """
    )
    expected_changelog = dedent(
        """\
        .. _#100: link
        .. _#5: link
        .. _PR#10: link
        .. _PR#3: link
        """
    )

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=Gitlab(example_git_https_url),
        release_history=artificial_release_history,
        mode=ChangelogMode.UPDATE,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl).render()

    # Evaluate
    assert expected_changelog == actual_changelog


def test_changelog_context_pypi_url_filter(
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """``create_pypi_url`` with no version renders the bare project URL."""
    changelog_tpl = dedent(
        """\
        {{ "example-package" | create_pypi_url }}
        """
    )
    expected_changelog = dedent(
        """\
        https://pypi.org/project/example-package
        """
    )

    env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True)
    context = make_changelog_context(
        hvcs_client=Gitlab(example_git_https_url),
        release_history=artificial_release_history,
        mode=ChangelogMode.UPDATE,
        prev_changelog_file=changelog_md_file,
        insertion_flag="",
        mask_initial_release=False,
    )
    context.bind_to_environment(env)

    # Create changelog from template with environment
    actual_changelog = env.from_string(changelog_tpl).render()

    # Evaluate
    assert expected_changelog == actual_changelog


def test_changelog_context_pypi_url_filter_tagged(
    example_git_https_url: str,
    artificial_release_history: ReleaseHistory,
    changelog_md_file: Path,
):
    """``create_pypi_url`` with a version argument appends ``/<version>``
    (version pulled from the first released entry of the context history)."""
    version = "1.0.0"
    changelog_tpl = dedent(
        """\
        {% set release = context.history.released.values() | first %}{{ "example-package" | create_pypi_url(release.version | string) }}
        """
    )
    expected_changelog = dedent(
        f"""\
        https://pypi.org/project/example-package/{version}
        """
    )

    env = environment(trim_blocks=True,
lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=Gitlab(example_git_https_url), release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea]) def test_changelog_context_release_url_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], artificial_release_history: ReleaseHistory, changelog_md_file: Path, ): version = list(artificial_release_history.released.keys())[-1] changelog_tpl = dedent( """\ {% set release = context.history.released.values() | first %}{{ "[%s](%s)" | format( release.version.as_tag(), release.version.as_tag() | create_release_url, ) }} """ ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_changelog = dedent( f"""\ [{version.as_tag()}]({hvcs_client.create_release_url(version.as_tag())}) """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs_client, release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea, Bitbucket]) def test_changelog_context_format_w_official_name_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], artificial_release_history: 
ReleaseHistory, changelog_md_file: Path, ): changelog_tpl = dedent( """\ {{ "%s" | format_w_official_vcs_name }} {{ "{}" | format_w_official_vcs_name }} {{ "{vcs_name}" | format_w_official_vcs_name }} """ ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_changelog = dedent( f"""\ {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} """ ) env = environment(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=True) context = make_changelog_context( hvcs_client=hvcs_client, release_history=artificial_release_history, mode=ChangelogMode.UPDATE, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ) context.bind_to_environment(env) # Create changelog from template with environment actual_changelog = env.from_string(changelog_tpl).render() # Evaluate assert expected_changelog == actual_changelog python-semantic-release-9.21.0/tests/unit/semantic_release/changelog/test_default_changelog.py000066400000000000000000001251641475670435200330100ustar00rootroot00000000000000from __future__ import annotations from pathlib import Path from typing import TYPE_CHECKING import pytest # NOTE: use backport with newer API from importlib_resources import files import semantic_release from semantic_release.changelog.context import ChangelogMode, make_changelog_context from semantic_release.cli.changelog_writer import render_default_changelog_file from semantic_release.cli.config import ChangelogOutputFormat from semantic_release.commit_parser import ParsedCommit from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab if TYPE_CHECKING: from semantic_release.changelog.release_history import ReleaseHistory @pytest.fixture(scope="module") def default_changelog_template() -> str: """Retrieve the semantic-release default changelog template.""" version_notes_template = files(semantic_release.__name__).joinpath( Path("data", "templates", 
"angular", "md", "CHANGELOG.md.j2") ) return version_notes_template.read_text(encoding="utf-8") @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): artificial_release_history.unreleased = {} # Wipe out unreleased hvcs = hvcs_client(example_git_https_url) latest_version = next(iter(artificial_release_history.released.keys())) latest_release = artificial_release_history.released[latest_version] first_version = list(artificial_release_history.released.keys())[-1] feat_commit_obj = latest_release["elements"]["feature"][0] fix_commit_obj_1 = latest_release["elements"]["fix"][0] fix_commit_obj_2 = latest_release["elements"]["fix"][1] fix_commit_obj_3 = latest_release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" 
([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="angular", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_a_brk_change( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, release_history_w_brk_change: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) releases = iter(release_history_w_brk_change.released.keys()) latest_version = next(releases) latest_release = release_history_w_brk_change.released[latest_version] previous_version = next(releases) previous_release = release_history_w_brk_change.released[previous_version] first_version = list(release_history_w_brk_change.released.keys())[-1] 
brk_fix_commit_obj = latest_release["elements"]["Bug Fixes"][0] feat_commit_obj = previous_release["elements"]["feature"][0] fix_commit_obj_1 = previous_release["elements"]["fix"][0] fix_commit_obj_2 = previous_release["elements"]["fix"][1] fix_commit_obj_3 = previous_release["elements"]["fix"][2] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Bug Fixes", "", # Due to the 100 character limit, hash url will be on the second line f"- **{brk_fix_commit_obj.scope}**: {brk_fix_description.capitalize()}", f" ([`{brk_fix_commit_obj.commit.hexsha[:7]}`]({brk_fix_commit_url}))", "", "### Breaking Changes", "", # Currently does not consider the 100 character limit because the current # descriptions are short enough to fit in one line "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), 
change_desc=brk_fix_brking_description.capitalize(), ), "", "", f"## v{previous_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_brk_change, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="angular", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_multiple_brk_changes( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, release_history_w_multiple_brk_changes: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) releases = 
iter(release_history_w_multiple_brk_changes.released.keys()) latest_version = next(releases) latest_release = release_history_w_multiple_brk_changes.released[latest_version] previous_version = next(releases) previous_release = release_history_w_multiple_brk_changes.released[previous_version] first_version = list(release_history_w_multiple_brk_changes.released.keys())[-1] brk_feat_commit_obj = latest_release["elements"]["Features"][0] brk_fix_commit_obj = latest_release["elements"]["Bug Fixes"][0] feat_commit_obj = previous_release["elements"]["feature"][0] fix_commit_obj_1 = previous_release["elements"]["fix"][0] fix_commit_obj_2 = previous_release["elements"]["fix"][1] fix_commit_obj_3 = previous_release["elements"]["fix"][2] assert isinstance(brk_feat_commit_obj, ParsedCommit) assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) brk_feat_commit_url = hvcs.commit_hash_url(brk_feat_commit_obj.commit.hexsha) brk_feat_description = str.join("\n", brk_feat_commit_obj.descriptions) brk_feat_brking_description = str.join( "\n", brk_feat_commit_obj.breaking_descriptions ) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = 
hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Bug Fixes", "", # Due to the 100 character limit, hash url will be on the second line f"- **{brk_fix_commit_obj.scope}**: {brk_fix_description.capitalize()}", f" ([`{brk_fix_commit_obj.commit.hexsha[:7]}`]({brk_fix_commit_url}))", "", "### Features", "", # Due to the 100 character limit, hash url will be on the second line f"- {brk_feat_description.capitalize()}", f" ([`{brk_feat_commit_obj.commit.hexsha[:7]}`]({brk_feat_commit_url}))", "", "### Breaking Changes", "", # Currently does not consider the 100 character limit because the current # descriptions are short enough to fit in one line "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_feat_commit_obj.scope}**: " if brk_feat_commit_obj.scope else "" ), change_desc=brk_feat_brking_description.capitalize(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize(), ), "", "", f"## v{previous_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" 
([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_multiple_brk_changes, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="angular", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_no_initial_release_mask( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): artificial_release_history.unreleased = {} # Wipe out unreleased hvcs = hvcs_client(example_git_https_url) latest_version = next(iter(artificial_release_history.released.keys())) latest_release = artificial_release_history.released[latest_version] first_version = list(artificial_release_history.released.keys())[-1] feat_commit_obj = latest_release["elements"]["feature"][0] fix_commit_obj_1 = latest_release["elements"]["fix"][0] fix_commit_obj_2 = latest_release["elements"]["fix"][1] fix_commit_obj_3 = latest_release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = 
hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, 
release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="angular", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_unreleased_changes( hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], example_git_https_url: str, artificial_release_history: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) latest_version = next(iter(artificial_release_history.released.keys())) latest_release = artificial_release_history.released[latest_version] first_version = list(artificial_release_history.released.keys())[-1] feat_commit_obj = latest_release["elements"]["feature"][0] fix_commit_obj_1 = latest_release["elements"]["fix"][0] fix_commit_obj_2 = latest_release["elements"]["fix"][1] fix_commit_obj_3 = latest_release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", "## Unreleased", "", "### Feature", "", f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" 
([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "", f"## v{latest_version} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{first_version} ({today_date_str})", "", "- Initial Release", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=artificial_release_history, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=True, ), changelog_style="angular", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_a_notice( hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], example_git_https_url: str, release_history_w_a_notice: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) released_versions = iter(release_history_w_a_notice.released.keys()) 
latest_version = next(released_versions) prev_version_1 = next(released_versions) prev_version_2 = next(released_versions) latest_release = release_history_w_a_notice.released[latest_version] prev_release_1 = release_history_w_a_notice.released[prev_version_1] notice_commit_obj = next(iter(latest_release["elements"].values()))[0] feat_commit_obj = prev_release_1["elements"]["feature"][0] fix_commit_obj_1 = prev_release_1["elements"]["fix"][0] fix_commit_obj_2 = prev_release_1["elements"]["fix"][1] fix_commit_obj_3 = prev_release_1["elements"]["fix"][2] assert isinstance(notice_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Refactoring", "", # Due to the 100 character limit, hash url will be on the second line f"- **{notice_commit_obj.scope}**: {notice_commit_description.capitalize().rstrip()}", f" ([`{notice_commit_obj.commit.hexsha[:7]}`]({notice_commit_url}))", "", "### Additional Release Information", 
"", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", "", f"## v{prev_version_1} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{prev_version_2} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_a_notice, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="angular", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def 
test_default_changelog_template_w_a_notice_n_brk_change( hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], example_git_https_url: str, release_history_w_notice_n_brk_change: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) released_versions = iter(release_history_w_notice_n_brk_change.released.keys()) latest_version = next(released_versions) prev_version_1 = next(released_versions) prev_version_2 = next(released_versions) latest_release = release_history_w_notice_n_brk_change.released[latest_version] prev_release_1 = release_history_w_notice_n_brk_change.released[prev_version_1] brk_fix_commit_obj = latest_release["elements"]["Bug Fixes"][0] notice_commit_obj = latest_release["elements"]["Refactoring"][0] feat_commit_obj = prev_release_1["elements"]["feature"][0] fix_commit_obj_1 = prev_release_1["elements"]["fix"][0] fix_commit_obj_2 = prev_release_1["elements"]["fix"][1] fix_commit_obj_3 = prev_release_1["elements"]["fix"][2] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(notice_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = 
str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize().rstrip(), ), f" ([`{brk_fix_commit_obj.commit.hexsha[:7]}`]({brk_fix_commit_url}))", "", "### Refactoring", "", "- {commit_scope}{commit_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), commit_desc=notice_commit_description.capitalize().rstrip(), ), f" ([`{notice_commit_obj.commit.hexsha[:7]}`]({notice_commit_url}))", "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize().rstrip(), ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", "", f"## v{prev_version_1} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", 
"", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" ([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{prev_version_2} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_notice_n_brk_change, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="angular", ) assert expected_changelog == actual_changelog @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_changelog_template_w_multiple_notices( hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], example_git_https_url: str, release_history_w_multiple_notices: ReleaseHistory, changelog_md_file: Path, today_date_str: str, ): hvcs = hvcs_client(example_git_https_url) released_versions = iter(release_history_w_multiple_notices.released.keys()) latest_version = next(released_versions) prev_version_1 = next(released_versions) prev_version_2 = next(released_versions) latest_release = release_history_w_multiple_notices.released[latest_version] prev_release_1 = release_history_w_multiple_notices.released[prev_version_1] feat_notice_commit_obj = latest_release["elements"]["Features"][0] 
refactor_notice_commit_obj = latest_release["elements"]["Refactoring"][0] feat_commit_obj = prev_release_1["elements"]["feature"][0] fix_commit_obj_1 = prev_release_1["elements"]["fix"][0] fix_commit_obj_2 = prev_release_1["elements"]["fix"][1] fix_commit_obj_3 = prev_release_1["elements"]["fix"][2] assert isinstance(feat_notice_commit_obj, ParsedCommit) assert isinstance(refactor_notice_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) refactor_commit_url = hvcs.commit_hash_url(refactor_notice_commit_obj.commit.hexsha) refactor_commit_desc = str.join("\n", refactor_notice_commit_obj.descriptions) refactor_commit_notice_desc = str.join( "\n", refactor_notice_commit_obj.release_notices ) feat_notice_commit_url = hvcs.commit_hash_url(feat_notice_commit_obj.commit.hexsha) feat_notice_description = str.join("\n", feat_notice_commit_obj.descriptions) feat_commit_notice_desc = str.join("\n", feat_notice_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_changelog = str.join( "\n", [ "# CHANGELOG", "", "", f"## v{latest_version} ({today_date_str})", "", "### Features", "", "- {commit_scope}{commit_desc}".format( commit_scope=( f"**{feat_notice_commit_obj.scope}**: " if feat_notice_commit_obj.scope else "" ), 
commit_desc=feat_notice_description.capitalize().rstrip(), ), f" ([`{feat_notice_commit_obj.commit.hexsha[:7]}`]({feat_notice_commit_url}))", "", "### Refactoring", "", "- {commit_scope}{commit_desc}".format( commit_scope=( f"**{refactor_notice_commit_obj.scope}**: " if refactor_notice_commit_obj.scope else "" ), commit_desc=refactor_commit_desc.capitalize().rstrip(), ), f" ([`{refactor_notice_commit_obj.commit.hexsha[:7]}`]({refactor_commit_url}))", "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{refactor_notice_commit_obj.scope}**: " if refactor_notice_commit_obj.scope else "" ), change_desc=refactor_commit_notice_desc.capitalize().rstrip(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{feat_notice_commit_obj.scope}**: " if feat_notice_commit_obj.scope else "" ), change_desc=str.join( "\n", [ feat_commit_notice_desc.capitalize()[:73].rstrip(), " " + feat_commit_notice_desc[73:].strip(), ], ), ), "", "", f"## v{prev_version_1} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", "", "### Fix", "", # Commit 2 is first because it has no scope # Due to the 100 character limit, hash url will be on the second line f"- {fix_commit_2_description.capitalize()}", f" ([`{fix_commit_obj_2.commit.hexsha[:7]}`]({fix_commit_2_url}))", "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_3.scope}**: {fix_commit_3_description.capitalize()}", f" ([`{fix_commit_obj_3.commit.hexsha[:7]}`]({fix_commit_3_url}))", "", # Due to the 100 character limit, hash url will be on the second line f"- **{fix_commit_obj_1.scope}**: {fix_commit_1_description.capitalize()}", f" 
([`{fix_commit_obj_1.commit.hexsha[:7]}`]({fix_commit_1_url}))", "", "", f"## v{prev_version_2} ({today_date_str})", "", "### Feature", "", # Due to the 100 character limit, hash url will be on the second line f"- **{feat_commit_obj.scope}**: {feat_description.capitalize()}", f" ([`{feat_commit_obj.commit.hexsha[:7]}`]({feat_commit_url}))", ], ) actual_changelog = render_default_changelog_file( output_format=ChangelogOutputFormat.MARKDOWN, changelog_context=make_changelog_context( hvcs_client=hvcs, release_history=release_history_w_multiple_notices, mode=ChangelogMode.INIT, prev_changelog_file=changelog_md_file, insertion_flag="", mask_initial_release=False, ), changelog_style="angular", ) assert expected_changelog == actual_changelog python-semantic-release-9.21.0/tests/unit/semantic_release/changelog/test_release_history.py000066400000000000000000000250341475670435200325510ustar00rootroot00000000000000from __future__ import annotations from datetime import datetime from typing import TYPE_CHECKING, NamedTuple import pytest from git import Actor from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version from tests.const import COMMIT_MESSAGE, CONVENTIONAL_COMMITS_MINOR from tests.fixtures import ( repo_w_git_flow_w_alpha_prereleases_n_conventional_commits, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits, repo_w_github_flow_w_feature_release_channel_conventional_commits, repo_w_no_tags_conventional_commits, repo_w_trunk_only_conventional_commits, repo_w_trunk_only_n_prereleases_conventional_commits, ) from tests.util import add_text_to_file if TYPE_CHECKING: from typing import Protocol from semantic_release.commit_parser.conventional import ConventionalCommitParser from tests.fixtures.git_repo import ( BuiltRepoResult, GetCommitsFromRepoBuildDefFn, 
RepoDefinition, ) class CreateReleaseHistoryFromRepoDefFn(Protocol): def __call__(self, repo_def: RepoDefinition) -> FakeReleaseHistoryElements: ... # NOTE: not testing parser correctness here, just that the right commits end up # in the right places. So we only compare that the commits with the messages # we anticipate are in the right place, rather than by hash # So we are only using the conventional parser # We are also currently only testing that the "elements" key of the releases # is correct, i.e. the commits are in the right place - the other fields # will need special attention of their own later class FakeReleaseHistoryElements(NamedTuple): """ A fake release history structure that abstracts away the Parser-specific logic and only focuses that the commit messages are in the correct order and place. Where generally a ParsedCommit object exists, here we just use the actual `commit.message`. """ unreleased: dict[str, list[str]] released: dict[Version, dict[str, list[str]]] @pytest.fixture(scope="session") def create_release_history_from_repo_def() -> CreateReleaseHistoryFromRepoDefFn: def _create_release_history_from_repo_def( repo_def: RepoDefinition, ) -> FakeReleaseHistoryElements: # Organize the commits into the expected structure unreleased_history = {} released_history = {} for version_str, version_def in repo_def.items(): commits_per_group: dict[str, list] = { "Unknown": [], } for commit in version_def["commits"]: if commit["category"] not in commits_per_group: commits_per_group[commit["category"]] = [] commits_per_group[commit["category"]].append( # TODO: remove the newline when our release history strips whitespace from commit messages commit["msg"].strip() + "\n" ) if version_str == "Unreleased": unreleased_history = commits_per_group continue # handle released versions version = Version.parse(version_str) # add the PSR version commit message commits_per_group["Unknown"].append(COMMIT_MESSAGE.format(version=version)) # store the organized commits 
for this version released_history[version] = commits_per_group return FakeReleaseHistoryElements( unreleased=unreleased_history, released=released_history, ) return _create_release_history_from_repo_def @pytest.mark.parametrize( "repo_result", [ # CONVENTIONAL parser lazy_fixture(repo_w_no_tags_conventional_commits.__name__), *[ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_conventional_commits.__name__, # This is not tested because currently unable to disern the commits that were squashed or not # repo_w_github_flow_w_default_release_channel_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, ] ], ], ) @pytest.mark.order("last") def test_release_history( repo_result: BuiltRepoResult, default_conventional_parser: ConventionalCommitParser, file_in_repo: str, create_release_history_from_repo_def: CreateReleaseHistoryFromRepoDefFn, get_commits_from_repo_build_def: GetCommitsFromRepoBuildDefFn, ): repo = repo_result["repo"] expected_release_history = create_release_history_from_repo_def( get_commits_from_repo_build_def(repo_result["definition"]) ) expected_released_versions = sorted( map(str, expected_release_history.released.keys()) ) translator = VersionTranslator() # Nothing has unreleased commits currently history = ReleaseHistory.from_git_history( repo, translator, default_conventional_parser, # type: ignore[arg-type] ) released = history.released actual_released_versions = sorted(map(str, released.keys())) assert expected_released_versions == actual_released_versions for k in expected_release_history.released: expected = expected_release_history.released[k] expected_released_messages = str.join( "\n---\n", sorted([msg for 
bucket in expected.values() for msg in bucket]) ) actual = released[k]["elements"] actual_released_messages = str.join( "\n---\n", sorted( [ str(res.commit.message) for results in actual.values() for res in results ] ), ) assert expected_released_messages == actual_released_messages # PART 2: add some commits to the repo and check that they are in the right place for commit_message in CONVENTIONAL_COMMITS_MINOR: add_text_to_file(repo, file_in_repo) repo.git.commit(m=commit_message) expected_unreleased_messages = str.join( "\n---\n", sorted( [ msg for bucket in [ CONVENTIONAL_COMMITS_MINOR[::-1], *expected_release_history.unreleased.values(), ] for msg in bucket ] ), ) # Now we should have some unreleased commits, and nothing new released new_history = ReleaseHistory.from_git_history( repo, translator, default_conventional_parser, # type: ignore[arg-type] ) new_unreleased = new_history.unreleased new_released = new_history.released actual_unreleased_messages = str.join( "\n---\n", sorted( [ str(res.commit.message) for results in new_unreleased.values() for res in results ] ), ) assert expected_unreleased_messages == actual_unreleased_messages assert ( new_released == released ), "something that shouldn't be considered release has been released" @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_no_tags_conventional_commits.__name__), *[ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, ] ], ], ) @pytest.mark.order("last") def test_release_history_releases( repo_result: BuiltRepoResult, default_conventional_parser: ConventionalCommitParser ): new_version = 
Version.parse("100.10.1") actor = Actor("semantic-release", "semantic-release") release_history = ReleaseHistory.from_git_history( repo=repo_result["repo"], translator=VersionTranslator(), commit_parser=default_conventional_parser, # type: ignore[arg-type] ) tagged_date = datetime.now() new_rh = release_history.release( new_version, committer=actor, tagger=actor, tagged_date=tagged_date, ) assert new_rh is not release_history assert new_rh.unreleased == {} assert new_rh.released == { new_version: { "tagger": actor, "committer": actor, "tagged_date": tagged_date, "elements": release_history.unreleased, "version": new_version, }, **release_history.released, } @pytest.mark.parametrize( "repo_result", [ lazy_fixture(repo_w_no_tags_conventional_commits.__name__), *[ pytest.param( lazy_fixture(repo_fixture_name), marks=pytest.mark.comprehensive, ) for repo_fixture_name in [ repo_w_trunk_only_conventional_commits.__name__, repo_w_trunk_only_n_prereleases_conventional_commits.__name__, repo_w_github_flow_w_feature_release_channel_conventional_commits.__name__, repo_w_git_flow_w_alpha_prereleases_n_conventional_commits.__name__, repo_w_git_flow_w_rc_n_alpha_prereleases_n_conventional_commits.__name__, ] ], ], ) @pytest.mark.order("last") def test_all_matching_repo_tags_are_released( repo_result: BuiltRepoResult, default_conventional_parser: ConventionalCommitParser ): repo = repo_result["repo"] translator = VersionTranslator() release_history = ReleaseHistory.from_git_history( repo=repo, translator=translator, commit_parser=default_conventional_parser, # type: ignore[arg-type] ) for tag in repo.tags: assert translator.from_tag(tag.name) in release_history.released python-semantic-release-9.21.0/tests/unit/semantic_release/changelog/test_release_notes.py000066400000000000000000001074761475670435200322130ustar00rootroot00000000000000from __future__ import annotations import os from pathlib import Path from textwrap import dedent from typing import TYPE_CHECKING from unittest 
import mock import pytest # NOTE: use backport with newer API to support 3.7 from importlib_resources import files import semantic_release from semantic_release.cli.changelog_writer import generate_release_notes from semantic_release.commit_parser.token import ParsedCommit from semantic_release.hvcs import Bitbucket, Gitea, Github, Gitlab if TYPE_CHECKING: from semantic_release.changelog.release_history import ReleaseHistory from tests.fixtures.example_project import ExProjectDir @pytest.fixture(scope="module") def release_notes_template() -> str: """Retrieve the semantic-release default release notes template.""" version_notes_template = files(semantic_release.__name__).joinpath( Path("data", "templates", "angular", "md", ".release_notes.md.j2") ) return version_notes_template.read_text(encoding="utf-8") @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("license_name", ["", "MIT"]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], license_name: str, artificial_release_history: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(artificial_release_history.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = artificial_release_history.released[version] feat_commit_obj = release["elements"]["feature"][0] fix_commit_obj_1 = release["elements"]["fix"][0] fix_commit_obj_2 = release["elements"]["fix"][1] fix_commit_obj_3 = release["elements"]["fix"][2] assert isinstance(feat_commit_obj, ParsedCommit) assert isinstance(fix_commit_obj_1, ParsedCommit) assert isinstance(fix_commit_obj_2, ParsedCommit) assert isinstance(fix_commit_obj_3, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) fix_commit_1_url = hvcs.commit_hash_url(fix_commit_obj_1.commit.hexsha) fix_commit_1_description = str.join("\n", fix_commit_obj_1.descriptions) fix_commit_2_url = hvcs.commit_hash_url(fix_commit_obj_2.commit.hexsha) fix_commit_2_description = str.join("\n", fix_commit_obj_2.descriptions) fix_commit_3_url = hvcs.commit_hash_url(fix_commit_obj_3.commit.hexsha) fix_commit_3_description = str.join("\n", fix_commit_obj_3.descriptions) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "### Feature", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), commit_desc=feat_description.capitalize(), short_hash=feat_commit_obj.commit.hexsha[:7], url=feat_commit_url, ), "", "### Fix", "", # Commit 2 is first because it has no scope "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{fix_commit_obj_2.scope}**: " if fix_commit_obj_2.scope else "" ), commit_desc=fix_commit_2_description.capitalize(), short_hash=fix_commit_obj_2.commit.hexsha[:7], 
url=fix_commit_2_url, ), "", # Commit 3 is second because it starts with an A even though it has the same scope as 1 "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{fix_commit_obj_3.scope}**: " if fix_commit_obj_3.scope else "" ), commit_desc=fix_commit_3_description.capitalize(), short_hash=fix_commit_obj_3.commit.hexsha[:7], url=fix_commit_3_url, ), "", # Commit 1 is last "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{fix_commit_obj_1.scope}**: " if fix_commit_obj_1.scope else "" ), commit_desc=fix_commit_1_description.capitalize(), short_hash=fix_commit_obj_1.commit.hexsha[:7], url=fix_commit_1_url, ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=artificial_release_history, style="angular", mask_initial_release=mask_initial_release, license_name=license_name, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_w_a_brk_description( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_brk_change: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(release_history_w_brk_change.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_brk_change.released[version] brk_fix_commit_obj = next(iter(release["elements"].values()))[0] assert isinstance(brk_fix_commit_obj, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize(), short_hash=brk_fix_commit_obj.commit.hexsha[:7], url=brk_fix_commit_url, ), "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_brk_change, style="angular", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def 
test_default_release_notes_template_w_multiple_brk_changes( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_multiple_brk_changes: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). """ released_versions = iter(release_history_w_multiple_brk_changes.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_multiple_brk_changes.released[version] brk_fix_commit_obj = release["elements"]["Bug Fixes"][0] brk_feat_commit_obj = release["elements"]["Features"][0] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(brk_feat_commit_obj, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) brk_feat_commit_url = hvcs.commit_hash_url(brk_feat_commit_obj.commit.hexsha) brk_feat_description = str.join("\n", brk_feat_commit_obj.descriptions) brk_feat_brking_description = str.join( "\n", brk_feat_commit_obj.breaking_descriptions ) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize(), short_hash=brk_fix_commit_obj.commit.hexsha[:7], url=brk_fix_commit_url, ), "", "### Features", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_feat_commit_obj.scope}**: " if brk_feat_commit_obj.scope else "" ), commit_desc=brk_feat_description.capitalize(), 
short_hash=brk_feat_commit_obj.commit.hexsha[:7], url=brk_feat_commit_url, ), "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_feat_commit_obj.scope}**: " if brk_feat_commit_obj.scope else "" ), change_desc=brk_feat_brking_description.capitalize(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_multiple_brk_changes, style="angular", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("license_name", ["", "MIT"]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_first_release_masked( example_git_https_url: str, hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], license_name: str, single_release_history: ReleaseHistory, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" hvcs = hvcs_client(example_git_https_url) version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "- Initial Release", "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs, release=release, template_dir=Path(""), history=single_release_history, style="angular", mask_initial_release=True, license_name=license_name, ) assert expected_content == actual_content @pytest.mark.parametrize("license_name", ["", "MIT"]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_first_release_unmasked( example_git_https_url: str, hvcs_client: type[Bitbucket | Gitea | Github | Gitlab], license_name: str, single_release_history: ReleaseHistory, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" hvcs = hvcs_client(example_git_https_url) version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] feat_commit_obj = release["elements"]["feature"][0] assert isinstance(feat_commit_obj, ParsedCommit) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", *( [""] if not license_name else [ "", f"_This release is published under the {license_name} License._", "", ] ), "### Feature", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), commit_desc=feat_description.capitalize(), short_hash=feat_commit_obj.commit.hexsha[:7], url=feat_commit_url, ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs, release=release, template_dir=Path(""), history=single_release_history, style="angular", mask_initial_release=False, license_name=license_name, ) assert expected_content == actual_content def test_release_notes_context_sort_numerically_filter( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ [ ".. _#5: link", ".. _PR#3: link", ".. _PR#10: link", ".. _#100: link" ] | sort_numerically | join("\\n") }} """ ) ) expected_content = str.join( os.linesep, dedent( """\ .. _#5: link .. _#100: link .. _PR#3: link .. 
_PR#10: link """ ).split("\n"), ) actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="angular", mask_initial_release=False, ) assert expected_content == actual_content def test_release_notes_context_sort_numerically_filter_reversed( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ [ ".. _#5: link", ".. _PR#3: link", ".. _PR#10: link", ".. _#100: link" ] | sort_numerically(reverse=True) | join("\\n") }} """ ) ) expected_content = str.join( os.linesep, dedent( """\ .. _#100: link .. _#5: link .. _PR#10: link .. _PR#3: link """ ).split("\n"), ) actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="angular", mask_initial_release=False, ) assert expected_content == actual_content def test_release_notes_context_pypi_url_filter( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( """{{ "example-package" | create_pypi_url }}""" ) expected_content = f"https://pypi.org/project/example-package{os.linesep}" actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="angular", mask_initial_release=False, ) assert expected_content == actual_content def 
test_release_notes_context_pypi_url_filter_tagged( example_git_https_url: str, single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( """{{ "example-package" | create_pypi_url(release.version | string) }}""" ) expected_content = f"https://pypi.org/project/example-package/{version}{os.linesep}" actual_content = generate_release_notes( hvcs_client=Github(remote_url=example_git_https_url), release=release, template_dir=example_project_dir, history=single_release_history, style="angular", mask_initial_release=False, ) assert expected_content == actual_content @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea]) def test_release_notes_context_release_url_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ "[%s](%s)" | format( release.version.as_tag(), release.version.as_tag() | create_release_url, ) }} """ ) ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_content = dedent( f"""\ [{version.as_tag()}]({hvcs_client.create_release_url(version.as_tag())}) """ ) actual_content = generate_release_notes( hvcs_client=hvcs_client, release=release, template_dir=example_project_dir, history=single_release_history, style="angular", mask_initial_release=False, ) assert expected_content == actual_content @pytest.mark.parametrize("hvcs_client_class", [Github, Gitlab, Gitea, Bitbucket]) def 
test_release_notes_context_format_w_official_name_filter( example_git_https_url: str, hvcs_client_class: type[Github | Gitlab | Gitea], single_release_history: ReleaseHistory, example_project_dir: ExProjectDir, change_to_ex_proj_dir: None, ): version = list(single_release_history.released.keys())[-1] release = single_release_history.released[version] example_project_dir.joinpath(".release_notes.md.j2").write_text( dedent( """\ {{ "%s" | format_w_official_vcs_name }} {{ "{}" | format_w_official_vcs_name }} {{ "{vcs_name}" | format_w_official_vcs_name }} """ ) ) with mock.patch.dict(os.environ, {}, clear=True): hvcs_client = hvcs_client_class(remote_url=example_git_https_url) expected_content = dedent( f"""\ {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} {hvcs_client.OFFICIAL_NAME} """ ) actual_content = generate_release_notes( hvcs_client=hvcs_client, release=release, template_dir=example_project_dir, history=single_release_history, style="angular", mask_initial_release=False, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_w_a_notice( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_a_notice: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(release_history_w_a_notice.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_a_notice.released[version] notice_commit_obj = next(iter(release["elements"].values()))[0] assert isinstance(notice_commit_obj, ParsedCommit) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Refactoring", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), commit_desc=notice_commit_description.capitalize().rstrip(), short_hash=notice_commit_obj.commit.hexsha[:7], url=notice_commit_url, ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_a_notice, style="angular", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def 
test_default_release_notes_template_w_a_notice_n_brk_change( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_notice_n_brk_change: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). """ released_versions = iter(release_history_w_notice_n_brk_change.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_notice_n_brk_change.released[version] brk_fix_commit_obj = release["elements"]["Bug Fixes"][0] notice_commit_obj = release["elements"]["Refactoring"][0] assert isinstance(brk_fix_commit_obj, ParsedCommit) assert isinstance(notice_commit_obj, ParsedCommit) brk_fix_commit_url = hvcs.commit_hash_url(brk_fix_commit_obj.commit.hexsha) brk_fix_description = str.join("\n", brk_fix_commit_obj.descriptions) brk_fix_brking_description = str.join( "\n", brk_fix_commit_obj.breaking_descriptions ) notice_commit_url = hvcs.commit_hash_url(notice_commit_obj.commit.hexsha) notice_commit_description = str.join("\n", notice_commit_obj.descriptions) notice_description = str.join("\n", notice_commit_obj.release_notices) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Bug Fixes", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), commit_desc=brk_fix_description.capitalize().rstrip(), short_hash=brk_fix_commit_obj.commit.hexsha[:7], url=brk_fix_commit_url, ), "", "### Refactoring", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), commit_desc=notice_commit_description.capitalize().rstrip(), 
short_hash=notice_commit_obj.commit.hexsha[:7], url=notice_commit_url, ), "", "### Breaking Changes", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{brk_fix_commit_obj.scope}**: " if brk_fix_commit_obj.scope else "" ), change_desc=brk_fix_brking_description.capitalize().rstrip(), ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{notice_commit_obj.scope}**: " if notice_commit_obj.scope else "" ), change_desc=notice_description.capitalize().rstrip(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_notice_n_brk_change, style="angular", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content @pytest.mark.parametrize("mask_initial_release", [True, False]) @pytest.mark.parametrize("hvcs_client", [Github, Gitlab, Gitea, Bitbucket]) def test_default_release_notes_template_w_multiple_notices( example_git_https_url: str, hvcs_client: type[Github | Gitlab | Gitea | Bitbucket], release_history_w_multiple_notices: ReleaseHistory, mask_initial_release: bool, today_date_str: str, ): """ Unit test goal: just make sure it renders the release notes template without error. Scenarios are better suited for all the variations (commit types). 
""" released_versions = iter(release_history_w_multiple_notices.released.keys()) version = next(released_versions) prev_version = next(released_versions) hvcs = hvcs_client(example_git_https_url) release = release_history_w_multiple_notices.released[version] feat_commit_obj = release["elements"]["Features"][0] refactor_commit_obj = release["elements"]["Refactoring"][0] assert isinstance(refactor_commit_obj, ParsedCommit) assert isinstance(feat_commit_obj, ParsedCommit) refactor_commit_url = hvcs.commit_hash_url(refactor_commit_obj.commit.hexsha) refactor_commit_desc = str.join("\n", refactor_commit_obj.descriptions) refactor_commit_notice_desc = str.join("\n", refactor_commit_obj.release_notices) feat_commit_url = hvcs.commit_hash_url(feat_commit_obj.commit.hexsha) feat_description = str.join("\n", feat_commit_obj.descriptions) feat_commit_notice_desc = str.join("\n", feat_commit_obj.release_notices) expected_content = str.join( os.linesep, [ f"## v{version} ({today_date_str})", "", "### Features", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), commit_desc=feat_description.capitalize().rstrip(), short_hash=feat_commit_obj.commit.hexsha[:7], url=feat_commit_url, ), "", "### Refactoring", "", "- {commit_scope}{commit_desc} ([`{short_hash}`]({url}))".format( commit_scope=( f"**{refactor_commit_obj.scope}**: " if refactor_commit_obj.scope else "" ), commit_desc=refactor_commit_desc.capitalize().rstrip(), short_hash=refactor_commit_obj.commit.hexsha[:7], url=refactor_commit_url, ), "", "### Additional Release Information", "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{refactor_commit_obj.scope}**: " if refactor_commit_obj.scope else "" ), change_desc=refactor_commit_notice_desc.capitalize().rstrip(), ), "", "- {commit_scope}{change_desc}".format( commit_scope=( f"**{feat_commit_obj.scope}**: " if feat_commit_obj.scope else "" ), 
change_desc=feat_commit_notice_desc.capitalize().rstrip(), ), "", ], ) if not isinstance(hvcs, Gitea): expected_content += str.join( os.linesep, [ "", "---", "", "**Detailed Changes**: [{prev_version}...{new_version}]({version_compare_url})".format( prev_version=prev_version.as_tag(), new_version=version.as_tag(), version_compare_url=hvcs.compare_url( prev_version.as_tag(), version.as_tag() ), ), "", ], ) actual_content = generate_release_notes( hvcs_client=hvcs_client(remote_url=example_git_https_url), release=release, template_dir=Path(""), history=release_history_w_multiple_notices, style="angular", mask_initial_release=mask_initial_release, ) assert expected_content == actual_content python-semantic-release-9.21.0/tests/unit/semantic_release/changelog/test_template.py000066400000000000000000000044521475670435200311640ustar00rootroot00000000000000from __future__ import annotations # TODO: This tests for the main options that will help configuring a template, # but not all of them. The testing can be expanded to cover all the options later. # It's not super essential as Jinja2 does most of the testing, we're just checking # that we can properly set the right strings in the template environment. from textwrap import dedent from typing import TYPE_CHECKING import pytest from semantic_release.changelog.template import environment if TYPE_CHECKING: from typing import Any EXAMPLE_TEMPLATE_FORMAT_STR = """

This is an example template document

The title is {variable_start_string} title | upper {variable_end_string}

{comment_start_string}- This text should not appear {comment_end_string} {block_start_string}- for subject in subjects {block_end_string}

This is a paragraph about {variable_start_string} subject {variable_end_string}

{block_start_string}- endfor {block_end_string}""" @pytest.mark.parametrize( "format_map", [ { "block_start_string": "{%", "block_end_string": "%}", "variable_start_string": "{{", "variable_end_string": "}}", "comment_start_string": "{#", "comment_end_string": "#}", }, { "block_start_string": "{[", "block_end_string": "]}", "variable_start_string": "{{", "variable_end_string": "}}", "comment_start_string": "/*", "comment_end_string": "*/", }, ], ) @pytest.mark.parametrize( "subjects", [("dogs", "cats"), ("stocks", "finance", "politics")] ) def test_template_env_configurable(format_map: dict[str, Any], subjects: tuple[str]): template_as_str = EXAMPLE_TEMPLATE_FORMAT_STR.format_map(format_map) env = environment(**format_map) template = env.from_string(template_as_str) title = "important" newline = "\n" expected_result = dedent( f"""

This is an example template document

The title is {title.upper()}

{(newline + " " * 8).join(f'

This is a paragraph about {subject}

' for subject in subjects)}""" # noqa: E501 ) actual_result = template.render(title="important", subjects=subjects) assert expected_result == actual_result python-semantic-release-9.21.0/tests/unit/semantic_release/changelog/test_template_render.py000066400000000000000000000117311475670435200325210ustar00rootroot00000000000000from __future__ import annotations import itertools import os from typing import TYPE_CHECKING import pytest from semantic_release.changelog.template import environment, recursive_render if TYPE_CHECKING: from pathlib import Path from tests.fixtures.example_project import ExProjectDir NORMAL_TEMPLATE_SRC = """--- content: - a string - ["a nested list"] vars: # a comment hello: {{ "world" | upper }} """ NORMAL_TEMPLATE_RENDERED = """--- content: - a string - ["a nested list"] vars: # a comment hello: WORLD """ PLAINTEXT_FILE_CONTENT = """ I should not be rendered as a template. {{ "this string should be untouched" | upper }} """ def _strip_trailing_j2(path: Path) -> Path: if path.name.endswith(".j2"): return path.with_name(path.name[:-3]) return path @pytest.fixture def normal_template(example_project_template_dir: Path) -> Path: template = example_project_template_dir / "normal.yaml.j2" template.parent.mkdir(parents=True, exist_ok=True) template.write_text(NORMAL_TEMPLATE_SRC) return template @pytest.fixture def long_directory_path(example_project_template_dir: Path) -> Path: # NOTE: fixture enables using Path rather than # constant string, so no issue with / vs \ on Windows return example_project_template_dir / "long" / "dir" / "path" @pytest.fixture def deeply_nested_file(long_directory_path: Path) -> Path: file = long_directory_path / "buried.txt" file.parent.mkdir(parents=True, exist_ok=True) file.write_text(PLAINTEXT_FILE_CONTENT) return file @pytest.fixture def hidden_file(example_project_template_dir: Path) -> Path: file = example_project_template_dir / ".hidden" file.parent.mkdir(parents=True, exist_ok=True) file.write_text("I 
shouldn't be present") return file @pytest.fixture def directory_path_with_hidden_subfolder(example_project_template_dir: Path) -> Path: return example_project_template_dir / "path" / ".subfolder" / "hidden" @pytest.fixture def excluded_file(directory_path_with_hidden_subfolder: Path) -> Path: file = directory_path_with_hidden_subfolder / "excluded.txt" file.parent.mkdir(parents=True, exist_ok=True) file.write_text("I shouldn't be present") return file @pytest.mark.usefixtures(excluded_file.__name__) def test_recursive_render( init_example_project: None, example_project_dir: Path, example_project_template_dir: Path, normal_template, deeply_nested_file, hidden_file, ): tmpl_dir = str(example_project_template_dir.resolve()) env = environment(template_dir=tmpl_dir) preexisting_paths = set(example_project_dir.rglob("**/*")) recursive_render( template_dir=example_project_template_dir.resolve(), environment=env, _root_dir=str(example_project_dir.resolve()), ) rendered_normal_template = _strip_trailing_j2( example_project_dir / normal_template.relative_to(example_project_template_dir) ) assert rendered_normal_template.exists() assert rendered_normal_template.read_text() == NORMAL_TEMPLATE_RENDERED rendered_deeply_nested = example_project_dir / deeply_nested_file.relative_to( example_project_template_dir ) assert rendered_deeply_nested.exists() assert rendered_deeply_nested.read_text() == PLAINTEXT_FILE_CONTENT rendered_hidden = example_project_dir / hidden_file.relative_to( example_project_template_dir ) assert not rendered_hidden.exists() assert not (example_project_dir / "path").exists() assert set(example_project_dir.rglob("**/*")) == preexisting_paths.union( example_project_dir / p for t in ( rendered_normal_template, rendered_deeply_nested, ) for p in itertools.accumulate( t.relative_to(example_project_dir).parts, func=lambda *a: os.sep.join(a) ) ) @pytest.fixture def dotfolder_template_dir(example_project_dir: ExProjectDir) -> Path: return example_project_dir / 
".templates/.psr-templates" @pytest.fixture def dotfolder_template( init_example_project: None, dotfolder_template_dir: Path ) -> Path: tmpl = dotfolder_template_dir / "template.txt" tmpl.parent.mkdir(parents=True, exist_ok=True) tmpl.write_text("I am a template") return tmpl def test_recursive_render_with_top_level_dotfolder( init_example_project: None, example_project_dir: ExProjectDir, dotfolder_template: Path, dotfolder_template_dir: Path, ): preexisting_paths = set(example_project_dir.rglob("**/*")) env = environment(template_dir=dotfolder_template_dir.resolve()) recursive_render( template_dir=dotfolder_template_dir.resolve(), environment=env, _root_dir=example_project_dir.resolve(), ) rendered_template = example_project_dir / dotfolder_template.name assert rendered_template.exists() assert set(example_project_dir.rglob("**/*")) == preexisting_paths.union( {example_project_dir / rendered_template} ) python-semantic-release-9.21.0/tests/unit/semantic_release/cli/000077500000000000000000000000001475670435200245535ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/cli/__init__.py000066400000000000000000000000001475670435200266520ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/cli/test_config.py000066400000000000000000000346061475670435200274420ustar00rootroot00000000000000from __future__ import annotations import os import shutil import sys from pathlib import Path, PurePosixPath from re import compile as regexp from typing import TYPE_CHECKING from unittest import mock import pytest import tomlkit from pydantic import RootModel, ValidationError from urllib3.util.url import parse_url import semantic_release from semantic_release.cli.config import ( BranchConfig, ChangelogConfig, ChangelogOutputFormat, GlobalCommandLineOptions, HvcsClient, RawConfig, RuntimeContext, _known_hvcs, ) from semantic_release.cli.util import load_raw_config_file from semantic_release.commit_parser.conventional 
import ConventionalCommitParserOptions from semantic_release.commit_parser.emoji import EmojiParserOptions from semantic_release.commit_parser.scipy import ScipyParserOptions from semantic_release.commit_parser.tag import TagParserOptions from semantic_release.const import DEFAULT_COMMIT_AUTHOR from semantic_release.enums import LevelBump from semantic_release.errors import ParserLoadError from tests.fixtures.repos import repo_w_no_tags_conventional_commits from tests.util import ( CustomParserOpts, CustomParserWithNoOpts, CustomParserWithOpts, IncompleteCustomParser, ) if TYPE_CHECKING: from typing import Any from tests.fixtures.example_project import ExProjectDir, UpdatePyprojectTomlFn from tests.fixtures.git_repo import BuildRepoFn, BuiltRepoResult, CommitConvention @pytest.mark.parametrize( "patched_os_environ, remote_config, expected_token", [ ( {"GH_TOKEN": "mytoken"}, {"type": HvcsClient.GITHUB.value}, "mytoken", ), ( {"GITLAB_TOKEN": "mytoken"}, {"type": HvcsClient.GITLAB.value}, "mytoken", ), ( {"GITEA_TOKEN": "mytoken"}, {"type": HvcsClient.GITEA.value}, "mytoken", ), ( # default not provided -> means Github {"GH_TOKEN": "mytoken"}, {}, "mytoken", ), ( {"CUSTOM_TOKEN": "mytoken"}, {"type": HvcsClient.GITHUB.value, "token": {"env": "CUSTOM_TOKEN"}}, "mytoken", ), ], ) def test_load_hvcs_default_token( patched_os_environ: dict[str, str], remote_config: dict[str, Any], expected_token: str, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): raw_config = RawConfig.model_validate( { "remote": remote_config, } ) assert expected_token == raw_config.remote.token @pytest.mark.parametrize("remote_config", [{"type": "nonexistent"}]) def test_invalid_hvcs_type(remote_config: dict[str, Any]): with pytest.raises(ValidationError) as excinfo: RawConfig.model_validate( { "remote": remote_config, } ) assert "remote.type" in str(excinfo.value) @pytest.mark.parametrize( "commit_parser, expected_parser_opts", [ ( None, 
RootModel(ConventionalCommitParserOptions()).model_dump(), ), # default not provided -> means conventional ("conventional", RootModel(ConventionalCommitParserOptions()).model_dump()), ("emoji", RootModel(EmojiParserOptions()).model_dump()), ("scipy", RootModel(ScipyParserOptions()).model_dump()), ("tag", RootModel(TagParserOptions()).model_dump()), (f"{CustomParserWithNoOpts.__module__}:{CustomParserWithNoOpts.__name__}", {}), ( f"{CustomParserWithOpts.__module__}:{CustomParserWithOpts.__name__}", RootModel(CustomParserOpts()).model_dump(), ), ], ) def test_load_default_parser_opts( commit_parser: str | None, expected_parser_opts: dict[str, Any] ): raw_config = RawConfig.model_validate( # Since TOML does not support NoneTypes, we need to not include the key {"commit_parser": commit_parser} if commit_parser else {} ) assert expected_parser_opts == raw_config.commit_parser_options def test_load_user_defined_parser_opts(): user_defined_opts = { "allowed_tags": ["foo", "bar", "baz"], "minor_tags": ["bar"], "patch_tags": ["baz"], "default_bump_level": LevelBump.PATCH.value, } raw_config = RawConfig.model_validate( { "commit_parser": "conventional", "commit_parser_options": user_defined_opts, } ) assert user_defined_opts == raw_config.commit_parser_options @pytest.mark.parametrize("commit_parser", [""]) def test_invalid_commit_parser_value(commit_parser: str): with pytest.raises(ValidationError) as excinfo: RawConfig.model_validate( { "commit_parser": commit_parser, } ) assert "commit_parser" in str(excinfo.value) def test_default_toml_config_valid(example_project_dir: ExProjectDir): default_config_file = example_project_dir / "default.toml" default_config_file.write_text( tomlkit.dumps(RawConfig().model_dump(mode="json", exclude_none=True)) ) written = default_config_file.read_text(encoding="utf-8") loaded = tomlkit.loads(written).unwrap() # Check that we can load it correctly parsed = RawConfig.model_validate(loaded) assert parsed # Check the re-loaded internal 
representation is sufficient # There is an issue with BaseModel.__eq__ that means # comparing directly doesn't work with parsed.dict(); this # is because of how tomlkit parsed toml @pytest.mark.parametrize( "mock_env, expected_author", [ ({}, DEFAULT_COMMIT_AUTHOR), ({"GIT_COMMIT_AUTHOR": "foo "}, "foo "), ], ) @pytest.mark.usefixtures(repo_w_no_tags_conventional_commits.__name__) def test_commit_author_configurable( example_pyproject_toml: Path, mock_env: dict[str, str], expected_author: str, change_to_ex_proj_dir: None, ): content = tomlkit.loads(example_pyproject_toml.read_text(encoding="utf-8")).unwrap() with mock.patch.dict(os.environ, mock_env): raw = RawConfig.model_validate(content) runtime = RuntimeContext.from_raw_config( raw=raw, global_cli_options=GlobalCommandLineOptions(), ) resulting_author = ( f"{runtime.commit_author.name} <{runtime.commit_author.email}>" ) assert expected_author == resulting_author def test_load_valid_runtime_config( build_configured_base_repo: BuildRepoFn, example_project_dir: ExProjectDir, example_pyproject_toml: Path, update_pyproject_toml: UpdatePyprojectTomlFn, change_to_ex_proj_dir: None, ): build_configured_base_repo(example_project_dir) # Wipe out any existing configuration options update_pyproject_toml(f"tool.{semantic_release.__name__}", {}) runtime_ctx = RuntimeContext.from_raw_config( RawConfig.model_validate(load_raw_config_file(example_pyproject_toml)), global_cli_options=GlobalCommandLineOptions(), ) # TODO: add more validation assert runtime_ctx @pytest.mark.parametrize( "commit_parser", [ # Module:Class string f"{CustomParserWithNoOpts.__module__}:{CustomParserWithNoOpts.__name__}", f"{CustomParserWithOpts.__module__}:{CustomParserWithOpts.__name__}", # File path module:Class string f"{CustomParserWithNoOpts.__module__.replace('.', '/')}.py:{CustomParserWithNoOpts.__name__}", f"{CustomParserWithOpts.__module__.replace('.', '/')}.py:{CustomParserWithOpts.__name__}", ], ) def 
test_load_valid_runtime_config_w_custom_parser( commit_parser: CommitConvention, build_configured_base_repo: BuildRepoFn, example_project_dir: ExProjectDir, example_pyproject_toml: Path, change_to_ex_proj_dir: None, request: pytest.FixtureRequest, ): fake_sys_modules = {**sys.modules} if ".py" in commit_parser: module_filepath = Path(commit_parser.split(":")[0]) module_filepath.parent.mkdir(parents=True, exist_ok=True) module_filepath.parent.joinpath("__init__.py").touch() shutil.copy( src=str(request.config.rootpath / module_filepath), dst=str(module_filepath), ) fake_sys_modules.pop( str(Path(module_filepath).with_suffix("")).replace(os.sep, ".") ) build_configured_base_repo( example_project_dir, commit_type=commit_parser, ) with mock.patch.dict(sys.modules, fake_sys_modules, clear=True): assert RuntimeContext.from_raw_config( RawConfig.model_validate(load_raw_config_file(example_pyproject_toml)), global_cli_options=GlobalCommandLineOptions(), ) @pytest.mark.parametrize( "commit_parser", [ # Non-existant module "tests.missing_module:CustomParser", # Non-existant class f"{CustomParserWithOpts.__module__}:MissingCustomParser", # Incomplete class implementation f"{IncompleteCustomParser.__module__}:{IncompleteCustomParser.__name__}", # Non-existant module file "tests/missing_module.py:CustomParser", # Non-existant class in module file f"{CustomParserWithOpts.__module__.replace('.', '/')}.py:MissingCustomParser", # Incomplete class implementation in module file f"{IncompleteCustomParser.__module__.replace('.', '/')}.py:{IncompleteCustomParser.__name__}", ], ) def test_load_invalid_custom_parser( commit_parser: str, build_configured_base_repo: BuildRepoFn, example_project_dir: ExProjectDir, example_pyproject_toml: Path, update_pyproject_toml: UpdatePyprojectTomlFn, pyproject_toml_config_option_parser: str, change_to_ex_proj_dir: None, ): build_configured_base_repo(example_project_dir) # Wipe out any existing configuration options 
update_pyproject_toml(f"{pyproject_toml_config_option_parser}_options", {}) # Insert invalid custom parser string into configuration update_pyproject_toml(pyproject_toml_config_option_parser, commit_parser) with pytest.raises(ParserLoadError): RuntimeContext.from_raw_config( RawConfig.model_validate(load_raw_config_file(example_pyproject_toml)), global_cli_options=GlobalCommandLineOptions(), ) def test_branch_config_with_plain_wildcard(): branch_config = BranchConfig( match="*", ) assert branch_config.match == ".*" @pytest.mark.parametrize( "invalid_regex", [ "*abc", "[a-z", "(.+", "{2,3}", "a{3,2}", ], ) def test_branch_config_with_invalid_regex(invalid_regex: str): with pytest.raises(ValidationError): BranchConfig( match=invalid_regex, ) @pytest.mark.parametrize( "valid_patterns", [ # Single entry [r"chore(?:\([^)]*?\))?: .+"], # Multiple entries [r"^\d+\.\d+\.\d+", r"Initial [Cc]ommit.*"], ], ) def test_changelog_config_with_valid_exclude_commit_patterns(valid_patterns: list[str]): assert ChangelogConfig.model_validate( { "exclude_commit_patterns": valid_patterns, } ) @pytest.mark.parametrize( "invalid_patterns, index_of_invalid_pattern", [ # Single entry, single incorrect (["*abc"], 0), # Two entries, second incorrect ([".*", "[a-z"], 1), # Two entries, first incorrect (["(.+", ".*"], 0), ], ) def test_changelog_config_with_invalid_exclude_commit_patterns( invalid_patterns: list[str], index_of_invalid_pattern: int, ): with pytest.raises( ValidationError, match=regexp( str.join( "", [ r".*\bexclude_commit_patterns\[", str(index_of_invalid_pattern), r"\]: Invalid regular expression", ], ), ), ): ChangelogConfig.model_validate( { "exclude_commit_patterns": invalid_patterns, } ) @pytest.mark.parametrize( "output_format, insertion_flag", [ ( ChangelogOutputFormat.MARKDOWN.value, "", ), ( ChangelogOutputFormat.RESTRUCTURED_TEXT.value, f"..{os.linesep} version list", ), ], ) def test_changelog_config_default_insertion_flag( output_format: str, insertion_flag: str, ): 
changelog_config = ChangelogConfig.model_validate( { "default_templates": { "output_format": output_format, } } ) assert changelog_config.insertion_flag == insertion_flag @pytest.mark.parametrize( "hvcs_type", [k.value for k in _known_hvcs], ) def test_git_remote_url_w_insteadof_alias( repo_w_initial_commit: BuiltRepoResult, example_pyproject_toml: Path, example_git_https_url: str, hvcs_type: str, update_pyproject_toml: UpdatePyprojectTomlFn, ): expected_url = parse_url(example_git_https_url) repo_name_suffix = PurePosixPath(expected_url.path or "").name insteadof_alias = "psr_test_insteadof" insteadof_value = expected_url.url.replace(repo_name_suffix, "") repo = repo_w_initial_commit["repo"] with repo.config_writer() as cfg: # Setup: define the insteadOf replacement value cfg.add_value(f'url "{insteadof_value}"', "insteadof", f"{insteadof_alias}:") # Setup: set the remote URL with an insteadOf alias cfg.set_value('remote "origin"', "url", f"{insteadof_alias}:{repo_name_suffix}") # Setup: set each supported HVCS client type update_pyproject_toml("tool.semantic_release.remote.type", hvcs_type) # Act: load the configuration (in clear environment) with mock.patch.dict(os.environ, {}, clear=True): # Essentially the same as CliContextObj._init_runtime_ctx() project_config = tomlkit.loads( example_pyproject_toml.read_text(encoding="utf-8") ).unwrap() runtime = RuntimeContext.from_raw_config( raw=RawConfig.model_validate( project_config.get("tool", {}).get("semantic_release", {}), ), global_cli_options=GlobalCommandLineOptions(), ) # Trigger a function that calls helpers.parse_git_url() actual_url = runtime.hvcs_client.remote_url(use_token=False) # Evaluate: the remote URL should be the full URL assert expected_url.url == actual_url python-semantic-release-9.21.0/tests/unit/semantic_release/cli/test_github_actions_output.py000066400000000000000000000045441475670435200326150ustar00rootroot00000000000000from __future__ import annotations from textwrap import dedent from 
typing import TYPE_CHECKING import pytest from semantic_release.cli.github_actions_output import VersionGitHubActionsOutput from semantic_release.version.version import Version from tests.util import actions_output_to_dict if TYPE_CHECKING: from pathlib import Path @pytest.mark.parametrize( "version, is_prerelease", [ ("1.2.3", False), ("1.2.3-alpha.1", True), ], ) @pytest.mark.parametrize("released", (True, False)) def test_version_github_actions_output_format( released: bool, version: str, is_prerelease: bool ): expected_output = dedent( f"""\ released={'true' if released else 'false'} version={version} tag=v{version} is_prerelease={'true' if is_prerelease else 'false'} """ ) output = VersionGitHubActionsOutput( released=released, version=Version.parse(version), ) # Evaluate (expected -> actual) assert expected_output == output.to_output_text() def test_version_github_actions_output_fails_if_missing_output(): output = VersionGitHubActionsOutput( version=Version.parse("1.2.3"), ) # Execute with expected failure with pytest.raises(ValueError, match="required outputs were not set"): output.to_output_text() def test_version_github_actions_output_writes_to_github_output_if_available( monkeypatch: pytest.MonkeyPatch, tmp_path: Path ): mock_output_file = tmp_path / "action.out" version_str = "1.2.3" monkeypatch.setenv("GITHUB_OUTPUT", str(mock_output_file.resolve())) output = VersionGitHubActionsOutput( version=Version.parse(version_str), released=True, ) output.write_if_possible() action_outputs = actions_output_to_dict( mock_output_file.read_text(encoding="utf-8") ) # Evaluate (expected -> actual) assert version_str == action_outputs["version"] assert str(True).lower() == action_outputs["released"] assert str(False).lower() == action_outputs["is_prerelease"] def test_version_github_actions_output_no_error_if_not_in_gha( monkeypatch: pytest.MonkeyPatch, ): output = VersionGitHubActionsOutput( version=Version.parse("1.2.3"), released=True, ) 
monkeypatch.delenv("GITHUB_OUTPUT", raising=False) output.write_if_possible() python-semantic-release-9.21.0/tests/unit/semantic_release/cli/test_masking_filter.py000066400000000000000000000143311475670435200311640ustar00rootroot00000000000000import io import logging import random import re import string from logging import LogRecord import pytest from semantic_release.cli.masking_filter import MaskingFilter random.seed(0) def _random_string(length: int = 10) -> str: alphabet = ( string.ascii_lowercase + string.ascii_uppercase + string.digits + string.punctuation ) return "".join(random.choice(alphabet) for _ in range(length)) @pytest.fixture def default_masking_filter(): return MaskingFilter() @pytest.fixture def logging_output_stream(): return io.StringIO() @pytest.fixture def logger(logging_output_stream, default_masking_filter): root = logging.getLogger() root.setLevel(logging.DEBUG) root.addHandler(logging.StreamHandler(logging_output_stream)) log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) for h in root.handlers: h.addFilter(default_masking_filter) return log @pytest.mark.parametrize( "unwanted", [f(obj) for f in (repr, str) for obj in ("", None)] ) def test_unwanted_masks_not_applied(default_masking_filter, unwanted): default_masking_filter.add_mask_for(unwanted, "foo") assert default_masking_filter._redact_patterns["foo"] == set() test_str = f"A long string containing the unwanted {unwanted} data" assert default_masking_filter.mask(test_str) == test_str @pytest.mark.parametrize( "masked, secret", [ ("secret-token", "secret-token"), (re.compile(r"ghp_.+?(?=\s|$)"), "ghp_" + _random_string(15)), ], ) @pytest.mark.parametrize("use_named_masks", (True, False)) def test_mask_applied(use_named_masks, masked, secret): masker = MaskingFilter(_use_named_masks=use_named_masks) masker.add_mask_for(masked, "secret") test_str = "Your secret is... 
{secret} preferably hidden" assert masker.mask(test_str.format(secret=secret)) == test_str.format( secret="<'secret' (value removed)>" if use_named_masks else masker.REPLACE_STR ) _secrets = ( "token" + _random_string(), "token" + _random_string(), "secret" + _random_string(), "secret" + _random_string(), ) @pytest.mark.parametrize( "masked, secrets", [ (_secrets, _secrets), ((re.compile(r"token.+?(?=\s|$)"), re.compile(r"secret.+?(?=\s|$)")), _secrets), ], ) def test_multiple_secrets_with_same_mask(masked, secrets): masker = MaskingFilter(_use_named_masks=True) for mask in masked: masker.add_mask_for(mask, "ksam") test_str = " ".join(secrets) assert masker.mask(test_str) == " ".join( "<'ksam' (value removed)>" for _ in secrets ) def test_secrets_exact_replacement(): masker = MaskingFilter(_use_named_masks=True) for secret in _secrets: masker.add_mask_for(secret, "smak") test_str = ", ".join(_secrets) + "!" assert ( masker.mask(test_str) == ", ".join("<'smak' (value removed)>" for _ in _secrets) + "!" 
) @pytest.mark.parametrize( "rec", [ LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args=(_secrets[3],), msg="long message with format %s for secret", exc_info=None, ), LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args={"secret1": _secrets[1], "secret2": _secrets[2]}, msg="another message using %(secret1)s and %(secret2)s", exc_info=None, ), ], ) @pytest.mark.parametrize( "masked", (_secrets, (re.compile(r"(secret|token).+?(?=\s|$)"),)) ) def test_log_record_is_masked_with_simple_args(default_masking_filter, rec, masked): for mask in masked: default_masking_filter.add_mask_for(mask) if isinstance(rec.args, tuple): assert rec.msg % tuple( default_masking_filter.REPLACE_STR for _ in rec.args ) == default_masking_filter.mask(rec.getMessage()) elif isinstance(rec.args, dict): assert rec.msg % { k: default_masking_filter.REPLACE_STR for k in rec.args } == default_masking_filter.mask(rec.getMessage()) @pytest.mark.parametrize( "rec", [ LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args=(_secrets,), msg="long message with format %s for secrets", exc_info=None, ), LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args={"secret1": _secrets[1], "other": _secrets[2:]}, msg="another message using %(secret1)s and %(other)r", exc_info=None, ), ], ) @pytest.mark.parametrize( "masked", (_secrets, (re.compile(r"(secret|token).+?(?=\s|$)"),)) ) def test_log_record_is_masked_with_nontrivial_args(default_masking_filter, rec, masked): for mask in masked: default_masking_filter.add_mask_for(mask) assert any(secret in rec.getMessage() for secret in _secrets) assert all( secret not in default_masking_filter.mask(rec.getMessage()) for secret in _secrets ) @pytest.mark.parametrize( "log_level", ( logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL, ), ) def test_log_messages_are_masked( default_masking_filter, log_level, logging_output_stream, logger, 
tmp_path ): for secret in _secrets: default_masking_filter.add_mask_for(secret) logger.log(log_level, ", ".join("%s" for _ in _secrets), *_secrets) root = logging.getLogger() for h in (*root.handlers, *logger.handlers): h.flush() written = logging_output_stream.getvalue() assert all(secret not in written for secret in _secrets) @pytest.mark.parametrize("obj", (object(), (), {}, AttributeError("whoopsie"))) def test_non_strings_are_returned(default_masking_filter, obj): rec = LogRecord( name=__name__, level=logging.INFO, pathname=__file__, lineno=10, args=(), msg=obj, exc_info=None, ) assert default_masking_filter.mask(rec.getMessage()) == str(obj) python-semantic-release-9.21.0/tests/unit/semantic_release/cli/test_util.py000066400000000000000000000104151475670435200271420ustar00rootroot00000000000000from __future__ import annotations import json from textwrap import dedent import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.cli.util import load_raw_config_file, parse_toml from semantic_release.errors import InvalidConfiguration @pytest.mark.parametrize( "toml_text, expected", [ ( dedent( r""" [not_the_right_key] foo = "bar" """ ), {}, ), ( dedent( r""" [semantic_release] foo = "bar" """ ), {"foo": "bar"}, ), ( dedent( r""" [tool.semantic_release] abc = 123 [tool.semantic_release.foo] def = 456 """ ), {"abc": 123, "foo": {"def": 456}}, ), ], ) def test_parse_toml(toml_text, expected): assert parse_toml(toml_text) == expected def test_parse_toml_raises_invalid_configuration_with_invalid_toml(): invalid_toml = dedent( r""" [semantic_release] foo = bar # this is not a valid TOML string """ ) with pytest.raises(InvalidConfiguration): parse_toml(invalid_toml) @pytest.fixture def raw_toml_config_file(tmp_path): path = tmp_path / "config.toml" path.write_text( dedent( r""" [semantic_release] foo = "bar" [semantic_release.abc] bar = "baz" """ ) ) return path @pytest.fixture def raw_pyproject_toml_config_file(tmp_path): 
tmp_path.mkdir(exist_ok=True) path = tmp_path / "pyproject.toml" path.write_text( dedent( r""" [tool.semantic_release] foo = "bar" [tool.semantic_release.abc] bar = "baz" """ ) ) return path @pytest.fixture def raw_json_config_file(tmp_path): tmp_path.mkdir(exist_ok=True) path = tmp_path / ".releaserc" path.write_text( json.dumps( {"semantic_release": {"foo": "bar", "abc": {"bar": "baz"}}}, indent=4 ) ) return path @pytest.fixture def invalid_toml_config_file(tmp_path): path = tmp_path / "config.toml" path.write_text( dedent( r""" [semantic_release] foo = bar # no quotes == invalid [semantic_release.abc] bar = "baz" """ ) ) return path @pytest.fixture def invalid_json_config_file(tmp_path): tmp_path.mkdir(exist_ok=True) path = tmp_path / "releaserc.json" path.write_text( dedent( r""" {"semantic_release": {foo: "bar", "abc": {bar: "baz"}}} """ ) ) return path @pytest.fixture def invalid_other_config_file(tmp_path): # e.g. XML path = tmp_path / "config.xml" path.write_text( dedent( r""" bar baz """ ) ) return path @pytest.mark.parametrize( "raw_config_file, expected", [ ( lazy_fixture(raw_toml_config_file.__name__), {"foo": "bar", "abc": {"bar": "baz"}}, ), ( lazy_fixture(raw_pyproject_toml_config_file.__name__), {"foo": "bar", "abc": {"bar": "baz"}}, ), ( lazy_fixture(raw_json_config_file.__name__), {"foo": "bar", "abc": {"bar": "baz"}}, ), ], ) def test_load_raw_config_file_loads_config(raw_config_file, expected): assert load_raw_config_file(raw_config_file) == expected @pytest.mark.parametrize( "raw_config_file", [ lazy_fixture(invalid_toml_config_file.__name__), lazy_fixture(invalid_json_config_file.__name__), lazy_fixture(invalid_other_config_file.__name__), ], ) def test_load_raw_invalid_config_file_raises_error(raw_config_file): with pytest.raises(InvalidConfiguration): load_raw_config_file(raw_config_file) 
python-semantic-release-9.21.0/tests/unit/semantic_release/cli/test_version.py000066400000000000000000000014371475670435200276560ustar00rootroot00000000000000import pytest from semantic_release.cli.commands.version import is_forced_prerelease @pytest.mark.parametrize( "force_prerelease, force_level, prerelease, expected", [ *[ (True, force_level, prerelease, True) for force_level in (None, "major", "minor", "patch") for prerelease in (True, False) ], *[ (False, force_level, prerelease, False) for force_level in ("major", "minor", "patch") for prerelease in (True, False) ], *[(False, None, prerelease, prerelease) for prerelease in (True, False)], ], ) def test_is_forced_prerelease(force_prerelease, force_level, prerelease, expected): assert is_forced_prerelease(force_prerelease, force_level, prerelease) == expected python-semantic-release-9.21.0/tests/unit/semantic_release/commit_parser/000077500000000000000000000000001475670435200266505ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/commit_parser/__init__.py000066400000000000000000000000001475670435200307470ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/commit_parser/test_conventional.py000066400000000000000000001342731475670435200327720ustar00rootroot00000000000000from __future__ import annotations from textwrap import dedent from typing import TYPE_CHECKING, Iterable, Sequence import pytest from semantic_release.commit_parser.conventional import ( ConventionalCommitParser, ConventionalCommitParserOptions, ) from semantic_release.commit_parser.token import ParsedCommit, ParseError from semantic_release.enums import LevelBump from tests.const import SUPPORTED_ISSUE_CLOSURE_PREFIXES if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn # NOTE: GitLab squash commits are not tested because by default # they don't have any unique attributes of them and they are also # fully customizable. 
# See https://docs.gitlab.com/ee/user/project/merge_requests/commit_templates.html # It also depends if Fast-Forward merge is enabled because that will # define if there is a merge commit or not and with that likely no # Merge Request Number included unless the user adds it. # TODO: add the recommendation in the PSR documentation is to set your GitLab templates # to mirror GitHub like references in the first subject line. Will Not matter # if fast-forward merge is enabled or not. @pytest.mark.parametrize( "commit_message", ["", "feat(parser\n): Add new parser pattern"] ) def test_parser_raises_unknown_message_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, ): parsed_results = default_conventional_parser.parse(make_commit_obj(commit_message)) assert isinstance(parsed_results, Iterable) for result in parsed_results: assert isinstance(result, ParseError) @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ None, { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed 
Resolves: #12 Signed-off-by: author feat: implemented searching gizmos by keyword docs(parser): add new parser pattern fix(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 invalid non-conventional formatted commit """ ), [ None, { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "features", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "bug fixes", "scope": "cli", "descriptions": [ "changed option name", "BREAKING CHANGE: A breaking change description", "Closes: #555", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "invalid non-conventional formatted commit", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_bitbucket_squash_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, 
Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), } ], ), ( "Multiple commits squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 fix(release-config): some commit subject An additional description Second paragraph with multiple lines that will be 
condensed Resolves: #12 Signed-off-by: author commit 1f34769bf8352131ad6f4879b8c47becf3c7aa69 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 feat: implemented searching gizmos by keyword commit b2334a64a11ef745a17a2a4034f651e08e8c45a6 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 docs(parser): add new parser pattern commit 5f0292fb5a88c3a46e4a02bec35b85f5228e8e51 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 fix(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 commit 2f314e7924be161cfbf220d3b6e2a6189a3b5609 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), }, { "bump": LevelBump.MINOR, "type": "features", "descriptions": ["implemented searching gizmos by keyword"], }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], }, { "bump": LevelBump.MAJOR, "type": "bug fixes", "scope": "cli", "descriptions": [ "changed option name", "BREAKING CHANGE: A breaking change description", "Closes: #555", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), }, None, ], ), ] ], ) def test_parser_squashed_commit_git_squash_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = 
parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via GitHub PR resolution", dedent( """\ fix(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ # TODO: v10 removal of PR number from subject "some commit subject (#10)", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via GitHub PR resolution", dedent( """\ fix(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author * feat: implemented searching gizmos by keyword * 
docs(parser): add new parser pattern * fix(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 * invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "bug fixes", "scope": "release-config", "descriptions": [ # TODO: v10 removal of PR number from subject "some commit subject (#10)", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "features", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "bug fixes", "scope": "cli", "descriptions": [ "changed option name", "BREAKING CHANGE: A breaking change description", "Closes: #555", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "* invalid non-conventional formatted commit", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_github_squash_style( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( 
len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, bump", [ ( "feat(parsers): add new parser pattern\n\nBREAKING CHANGE: change", LevelBump.MAJOR, ), ("feat(parsers)!: add new parser pattern", LevelBump.MAJOR), ( "feat(parsers): add new parser pattern\n\nNew pattern is awesome\n\n" "BREAKING CHANGE: change \n", LevelBump.MAJOR, ), ( "feat(parsers): add new parser pattern\n\nBREAKING-CHANGE: change !", LevelBump.MAJOR, ), ("feat(parser): add emoji parser", LevelBump.MINOR), ("fix(parser): fix regex in conventional parser", LevelBump.PATCH), ("test(parser): add a test for conventional parser", LevelBump.NO_RELEASE), ("feat(parser)!: edit data parsing stuff", LevelBump.MAJOR), ("fix!: edit data parsing stuff again", LevelBump.MAJOR), ("fix: superfix", LevelBump.PATCH), ], ) def test_parser_returns_correct_bump_level( default_conventional_parser: ConventionalCommitParser, commit_message: str, bump: LevelBump, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(commit_message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is bump 
@pytest.mark.parametrize( "message, type_", [ ("feat(parser): ...", "features"), ("fix(parser): ...", "bug fixes"), ("test(parser): ...", "testing"), ("docs(parser): ...", "documentation"), ("style(parser): ...", "code style"), ("refactor(parser): ...", "refactoring"), ("chore(parser): ...", "chores"), ], ) def test_parser_return_type_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, type_: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.type == type_ @pytest.mark.parametrize( "message, scope", [ ("chore(parser): ...", "parser"), ("chore(a part): ...", "a part"), ("chore(a_part): ...", "a_part"), ("chore(a-part): ...", "a-part"), ("chore(a.part): ...", "a.part"), ("chore(a+part): ...", "a+part"), ("chore(a&part): ...", "a&part"), ("chore((part)): ...", "(part)"), ("chore((p):rt): ...", "(p):rt"), ], ) def test_parser_return_scope_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, scope: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.scope == scope _long_text = ( "This is an long explanatory part of a commit message. It should give " "some insight to the fix this commit adds to the codebase." 
) _footer = "Closes: #400" @pytest.mark.parametrize( "message, descriptions", [ ("feat(parser): add emoji parser", ["add emoji parser"]), ( "fix(parser): fix regex in conventional parser", ["fix regex in conventional parser"], ), ( "test(parser): add a test for conventional parser", ["add a test for conventional parser"], ), ( f"fix(tox): fix env \n\n{_long_text}\n\n{_footer}", ["fix env ", _long_text, _footer], ), ("fix: superfix", ["superfix"]), ], ) def test_parser_return_subject_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, descriptions: list[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.descriptions == descriptions @pytest.mark.parametrize( "message, subject, merge_request_number", # TODO: in v10, we will remove the merge request number from the subject line [ # GitHub, Gitea style ( "feat(parser): add emoji parser (#123)", "add emoji parser (#123)", "#123", ), # GitLab style ( "fix(parser): fix regex in conventional parser (!456)", "fix regex in conventional parser (!456)", "!456", ), # BitBucket style ( "feat(parser): add emoji parser (pull request #123)", "add emoji parser (pull request #123)", "#123", ), # Both a linked merge request and an issue footer (should return the linked merge request) ("fix: superfix (#123)\n\nCloses: #400", "superfix (#123)", "#123"), # None ("fix: superfix", "superfix", ""), # None but includes an issue footer it should not be considered a linked merge request ("fix: superfix\n\nCloses: #400", "superfix", ""), ], ) def test_parser_return_linked_merge_request_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, subject: str, merge_request_number: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = 
default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert merge_request_number == result.linked_merge_request assert subject == result.descriptions[0] @pytest.mark.parametrize( "message, linked_issues", # TODO: in v10, we will remove the issue reference footers from the descriptions [ *[ # GitHub, Gitea, GitLab style ( f"feat(parser): add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: #555", ["#555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: #555", ["#555"]), # lowercase prefix (f"{footer_prefix.upper()}: #555", ["#555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: #555,#444", ["#444", "#555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: #555, #444", ["#444", "#555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: #555 , #444", ["#444", "#555"], ), # Comma separated (w/ extra space) (f"{footer_prefix}: #555 #444", ["#444", "#555"]), # Space separated ( f"{footer_prefix}: #555;#444", ["#444", "#555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: #555; #444", ["#444", "#555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: #555 ; #444", ["#444", "#555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: #555/#444", ["#444", "#555"], ), # slash separated (w/o space) ( f"{footer_prefix}: #555/ #444", ["#444", "#555"], ), # slash separated (w/ space) ( f"{footer_prefix}: #555 / #444", ["#444", "#555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: #555Ƽ", ["#444", "#555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: #555& #444", ["#444", "#555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: #555 & 
#444", ["#444", "#555"], ), # ampersand separated (w/ extra space) (f"{footer_prefix}: #555 and #444", ["#444", "#555"]), # and separated ( f"{footer_prefix}: #555, #444, and #333", ["#333", "#444", "#555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) (f"{footer_prefix}: #555\n{footer_prefix}: #444", ["#444", "#555"]), # More than 2 issues ( f"{footer_prefix}: #555, #444, #333", ["#333", "#444", "#555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: #555, #3333, #444", ["#444", "#555", "#3333"], ), # Single issue listed multiple times (f"{footer_prefix}: #555, #555", ["#555"]), # Multiple footers with the same issue (f"{footer_prefix}: #555\n{footer_prefix}: #555", ["#555"]), # Multiple issues via multiple inline git footers (f"{footer_prefix}: #555, {footer_prefix}: #444", ["#444", "#555"]), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: #555", "Signed-off-by: johndoe ", f"{footer_prefix}: #444", ], ), ["#444", "#555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} #666", []), (f"{footer_prefix} #666, #777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} #666, {footer_prefix} #777", []), # Invalid 'and' separation (f"{footer_prefix}: #666and#777", ["#666and#777"]), # Invalid prefix ("ref: #666", []), # body mentions an issue and has a different git footer ( "In #666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ # JIRA style ( f"feat(parser): add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: ABC-555", ["ABC-555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: ABC-555", 
["ABC-555"]), # lowercase prefix (f"{footer_prefix.upper()}: ABC-555", ["ABC-555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: ABC-555,ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: ABC-555, ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: ABC-555 , ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ extra space) ( f"{footer_prefix}: ABC-555 ABC-444", ["ABC-444", "ABC-555"], ), # Space separated ( f"{footer_prefix}: ABC-555;ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: ABC-555; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: ABC-555 ; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: ABC-555/ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/o space) ( f"{footer_prefix}: ABC-555/ ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ space) ( f"{footer_prefix}: ABC-555 / ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: ABC-555&ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: ABC-555& ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: ABC-555 & ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ extra space) ( f"{footer_prefix}: ABC-555 and ABC-444", ["ABC-444", "ABC-555"], ), # and separated ( f"{footer_prefix}: ABC-555, ABC-444, and ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) ( f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-444", ["ABC-444", "ABC-555"], ), # More than 2 issues ( f"{footer_prefix}: ABC-555, ABC-444, ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: ABC-555, 
ABC-3333, ABC-444", ["ABC-444", "ABC-555", "ABC-3333"], ), # Single issue listed multiple times (f"{footer_prefix}: ABC-555, ABC-555", ["ABC-555"]), # Multiple footers with the same issue (f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-555", ["ABC-555"]), # Multiple issues via multiple inline git footers ( f"{footer_prefix}: ABC-666, {footer_prefix}: ABC-777", ["ABC-666", "ABC-777"], ), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: ABC-555", "Signed-off-by: johndoe ", f"{footer_prefix}: ABC-444", ], ), ["ABC-444", "ABC-555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} ABC-666", []), (f"{footer_prefix} ABC-666, ABC-777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} ABC-666, {footer_prefix} ABC-777", []), # Invalid 'and' separation (f"{footer_prefix}: ABC-666andABC-777", ["ABC-666andABC-777"]), # Invalid prefix ("ref: ABC-666", []), # body mentions an issue and has a different git footer ( "In ABC-666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ ( f"feat(parser): add magic parser\n\n{footer}", linked_issues, ) for footer, linked_issues in [ # Multiple footers with the same issue but different prefixes ("Resolves: #555\nfix: #444", ["#444", "#555"]), # Whitespace padded footer (" Resolves: #555\n", ["#555"]), ] ], ( # Only grabs the issue reference when there is a GitHub PR reference in the subject "feat(parser): add magic parser (#123)\n\nCloses: #555", ["#555"], ), # Does not grab an issue when there is only a GitHub PR reference in the subject ("feat(parser): add magic parser (#123)", []), # Does not grab an issue when there is only a Bitbucket PR reference in the subject ("feat(parser): add magic parser (pull request #123)", []), ], ) def 
test_parser_return_linked_issues_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, linked_issues: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(linked_issues) == result.linked_issues @pytest.mark.parametrize( "message, notices", [ pytest.param( message, notices, id=test_id, ) for test_id, message, notices in [ ( "single notice", dedent( """\ fix(parser): fix regex in conventional parser NOTICE: This is a notice """ ), ["This is a notice"], ), ( "multiline notice", dedent( """\ fix(parser): fix regex in conventional parser NOTICE: This is a notice that is longer than other notices """ ), ["This is a notice that is longer than other notices"], ), ( "multiple notices", dedent( """\ fix(parser): fix regex in conventional parser NOTICE: This is a notice NOTICE: This is a second notice """ ), ["This is a notice", "This is a second notice"], ), ( "notice with other footer", dedent( """\ fix(parser): fix regex in conventional parser BREAKING CHANGE: This is a breaking change NOTICE: This is a notice """ ), ["This is a notice"], ), ] ], ) def test_parser_return_release_notices_from_commit_message( default_conventional_parser: ConventionalCommitParser, message: str, notices: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_conventional_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(notices) == result.release_notices # TODO: v10, remove this # full_description = str.join("\n\n", result.descriptions) # full_notice = str.join("\n\n", result.release_notices) # assert full_notice not in full_description 
############################## # test custom parser options # ############################## def test_parser_custom_default_level(make_commit_obj: MakeCommitObjFn): options = ConventionalCommitParserOptions(default_bump_level=LevelBump.MINOR) parsed_results = ConventionalCommitParser(options).parse( make_commit_obj("test(parser): add a test for conventional parser") ) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is LevelBump.MINOR def test_parser_custom_allowed_types( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, ): new_tag = "custom" custom_allowed_tags = [*default_conventional_parser.options.allowed_tags, new_tag] parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( allowed_tags=tuple(custom_allowed_tags), ) ) for commit_type, commit_msg in [ (new_tag, f"{new_tag}: ..."), # no scope (new_tag, f"{new_tag}(parser): ..."), # with scope ("chores", "chore(parser): ..."), # existing, non-release tag ]: parsed_results = parser.parse(make_commit_obj(commit_msg)) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.type == commit_type assert result.bump is LevelBump.NO_RELEASE def test_parser_custom_allowed_types_ignores_non_types( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, ): banned_tag = "feat" custom_allowed_tags = [*default_conventional_parser.options.allowed_tags] custom_allowed_tags.remove(banned_tag) parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( allowed_tags=tuple(custom_allowed_tags), ) ) parsed_results = parser.parse(make_commit_obj(f"{banned_tag}(parser): ...")) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParseError) def test_parser_custom_minor_tags(make_commit_obj: MakeCommitObjFn): 
custom_minor_tag = "docs" parser = ConventionalCommitParser( options=ConventionalCommitParserOptions(minor_tags=(custom_minor_tag,)) ) parsed_results = parser.parse(make_commit_obj(f"{custom_minor_tag}: ...")) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is LevelBump.MINOR def test_parser_custom_patch_tags(make_commit_obj: MakeCommitObjFn): custom_patch_tag = "test" parser = ConventionalCommitParser( options=ConventionalCommitParserOptions(patch_tags=(custom_patch_tag,)) ) parsed_results = parser.parse(make_commit_obj(f"{custom_patch_tag}: ...")) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert result.bump is LevelBump.PATCH def test_parser_ignore_merge_commit( default_conventional_parser: ConventionalCommitParser, make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = ConventionalCommitParser( options=ConventionalCommitParserOptions( **{ **default_conventional_parser.options.__dict__, "ignore_merge_commits": True, } ) ) base_commit = make_commit_obj("Merge branch 'fix/fix-feature' into 'main'") incomming_commit = make_commit_obj("feat: add a new feature") # Setup: Create a merge commit merge_commit = make_commit_obj("Merge branch 'feat/add-new-feature' into 'main'") merge_commit.parents = [base_commit, incomming_commit] # Action parsed_result = parser.parse(merge_commit) assert isinstance(parsed_result, ParseError) assert "Ignoring merge commit" in parsed_result.error python-semantic-release-9.21.0/tests/unit/semantic_release/commit_parser/test_emoji.py000066400000000000000000001231721475670435200313720ustar00rootroot00000000000000from __future__ import annotations from textwrap import dedent from typing import TYPE_CHECKING, Iterable, Sequence import pytest from semantic_release.commit_parser.emoji import EmojiCommitParser, EmojiParserOptions from 
semantic_release.commit_parser.token import ParsedCommit, ParseError from semantic_release.enums import LevelBump from tests.const import SUPPORTED_ISSUE_CLOSURE_PREFIXES if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn @pytest.mark.parametrize( "commit_message, bump, type_, descriptions, breaking_descriptions", [ # Major bump ( ":boom: Breaking changes\n\nMore description\n\nEven more description", LevelBump.MAJOR, ":boom:", [":boom: Breaking changes", "More description", "Even more description"], ["More description", "Even more description"], ), # Minor bump ( ":sparkles: Add a new feature\n\nSome description of the feature", LevelBump.MINOR, ":sparkles:", [":sparkles: Add a new feature", "Some description of the feature"], [], ), # Patch bump ( ":bug: Fixing a bug\n\nThe bug is finally gone!", LevelBump.PATCH, ":bug:", [":bug: Fixing a bug", "The bug is finally gone!"], [], ), # No release ( ":pencil: Documentation changes", LevelBump.NO_RELEASE, "Other", [":pencil: Documentation changes"], [], ), # Multiple emojis ( ":sparkles::pencil: Add a feature and document it", LevelBump.MINOR, ":sparkles:", [":sparkles::pencil: Add a feature and document it"], [], ), # Emoji in description ( ":sparkles: Add a new feature\n\n:boom: should not be detected", LevelBump.MINOR, ":sparkles:", [":sparkles: Add a new feature", ":boom: should not be detected"], [], ), ], ) def test_default_emoji_parser( default_emoji_parser: EmojiCommitParser, commit_message: str, bump: LevelBump, type_: str, descriptions: list[str], breaking_descriptions: list[str], make_commit_obj: MakeCommitObjFn, ): commit = make_commit_obj(commit_message) parsed_results = default_emoji_parser.parse(commit) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert bump is result.bump assert type_ == result.type assert descriptions == result.descriptions assert breaking_descriptions == result.breaking_descriptions 
@pytest.mark.parametrize( "message, subject, merge_request_number", # TODO: in v10, we will remove the merge request number from the subject line [ # GitHub, Gitea style ( ":sparkles: add new feature (#123)", ":sparkles: add new feature (#123)", "#123", ), # GitLab style ( ":bug: fix regex in parser (!456)", ":bug: fix regex in parser (!456)", "!456", ), # BitBucket style ( ":sparkles: add new feature (pull request #123)", ":sparkles: add new feature (pull request #123)", "#123", ), # Both a linked merge request and an issue footer (should return the linked merge request) (":bug: superfix (#123)\n\nCloses: #400", ":bug: superfix (#123)", "#123"), # None (":bug: superfix", ":bug: superfix", ""), # None but includes an issue footer it should not be considered a linked merge request (":bug: superfix\n\nCloses: #400", ":bug: superfix", ""), ], ) def test_parser_return_linked_merge_request_from_commit_message( default_emoji_parser: EmojiCommitParser, message: str, subject: str, merge_request_number: str, make_commit_obj: MakeCommitObjFn, ): parsed_results = default_emoji_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert merge_request_number == result.linked_merge_request assert subject == result.descriptions[0] @pytest.mark.parametrize( "message, linked_issues", # TODO: in v10, we will remove the issue reference footers from the descriptions [ *[ # GitHub, Gitea, GitLab style ( f":sparkles: (parser) add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: #555", ["#555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: #555", ["#555"]), # lowercase prefix (f"{footer_prefix.upper()}: #555", ["#555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: 
#555,#444", ["#444", "#555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: #555, #444", ["#444", "#555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: #555 , #444", ["#444", "#555"], ), # Comma separated (w/ extra space) (f"{footer_prefix}: #555 #444", ["#444", "#555"]), # Space separated ( f"{footer_prefix}: #555;#444", ["#444", "#555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: #555; #444", ["#444", "#555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: #555 ; #444", ["#444", "#555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: #555/#444", ["#444", "#555"], ), # slash separated (w/o space) ( f"{footer_prefix}: #555/ #444", ["#444", "#555"], ), # slash separated (w/ space) ( f"{footer_prefix}: #555 / #444", ["#444", "#555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: #555Ƽ", ["#444", "#555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: #555& #444", ["#444", "#555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: #555 & #444", ["#444", "#555"], ), # ampersand separated (w/ extra space) (f"{footer_prefix}: #555 and #444", ["#444", "#555"]), # and separated ( f"{footer_prefix}: #555, #444, and #333", ["#333", "#444", "#555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) (f"{footer_prefix}: #555\n{footer_prefix}: #444", ["#444", "#555"]), # More than 2 issues ( f"{footer_prefix}: #555, #444, #333", ["#333", "#444", "#555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: #555, #3333, #444", ["#444", "#555", "#3333"], ), # Single issue listed multiple times (f"{footer_prefix}: #555, #555", ["#555"]), # Multiple footers with the same issue (f"{footer_prefix}: #555\n{footer_prefix}: #555", ["#555"]), # Multiple issues via multiple inline git footers (f"{footer_prefix}: #555, {footer_prefix}: #444", ["#444", "#555"]), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: #555", "Signed-off-by: 
johndoe ", f"{footer_prefix}: #444", ], ), ["#444", "#555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} #666", []), (f"{footer_prefix} #666, #777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} #666, {footer_prefix} #777", []), # Invalid 'and' separation (f"{footer_prefix}: #666and#777", ["#666and#777"]), # Invalid prefix ("ref: #666", []), # body mentions an issue and has a different git footer ( "In #666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ # JIRA style ( f":sparkles: (parser) add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: ABC-555", ["ABC-555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: ABC-555", ["ABC-555"]), # lowercase prefix (f"{footer_prefix.upper()}: ABC-555", ["ABC-555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: ABC-555,ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: ABC-555, ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: ABC-555 , ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ extra space) ( f"{footer_prefix}: ABC-555 ABC-444", ["ABC-444", "ABC-555"], ), # Space separated ( f"{footer_prefix}: ABC-555;ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: ABC-555; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: ABC-555 ; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: ABC-555/ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/o 
space) ( f"{footer_prefix}: ABC-555/ ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ space) ( f"{footer_prefix}: ABC-555 / ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: ABC-555&ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: ABC-555& ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: ABC-555 & ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ extra space) ( f"{footer_prefix}: ABC-555 and ABC-444", ["ABC-444", "ABC-555"], ), # and separated ( f"{footer_prefix}: ABC-555, ABC-444, and ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) ( f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-444", ["ABC-444", "ABC-555"], ), # More than 2 issues ( f"{footer_prefix}: ABC-555, ABC-444, ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: ABC-555, ABC-3333, ABC-444", ["ABC-444", "ABC-555", "ABC-3333"], ), # Single issue listed multiple times (f"{footer_prefix}: ABC-555, ABC-555", ["ABC-555"]), # Multiple footers with the same issue (f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-555", ["ABC-555"]), # Multiple issues via multiple inline git footers ( f"{footer_prefix}: ABC-666, {footer_prefix}: ABC-777", ["ABC-666", "ABC-777"], ), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: ABC-555", "Signed-off-by: johndoe ", f"{footer_prefix}: ABC-444", ], ), ["ABC-444", "ABC-555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} ABC-666", []), (f"{footer_prefix} ABC-666, ABC-777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} ABC-666, {footer_prefix} 
ABC-777", []), # Invalid 'and' separation (f"{footer_prefix}: ABC-666andABC-777", ["ABC-666andABC-777"]), # Invalid prefix ("ref: ABC-666", []), # body mentions an issue and has a different git footer ( "In ABC-666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ ( f":sparkles: (parser) add magic parser\n\n{footer}", linked_issues, ) for footer, linked_issues in [ # Multiple footers with the same issue but different prefixes ("Resolves: #555\nfix: #444", ["#444", "#555"]), # Whitespace padded footer (" Resolves: #555\n", ["#555"]), ] ], ( # Only grabs the issue reference when there is a GitHub PR reference in the subject ":sparkles: (parser) add magic parser (#123)\n\nCloses: #555", ["#555"], ), # Does not grab an issue when there is only a GitHub PR reference in the subject (":sparkles: (parser) add magic parser (#123)", []), # Does not grab an issue when there is only a Bitbucket PR reference in the subject (":sparkles: (parser) add magic parser (pull request #123)", []), ], ) def test_parser_return_linked_issues_from_commit_message( default_emoji_parser: EmojiCommitParser, message: str, linked_issues: Sequence[str], make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_linked_issues": True, } ) ) # Action parsed_results = parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 # Evaluate (expected -> actual) result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(linked_issues) == result.linked_issues @pytest.mark.parametrize( "message, notices", [ pytest.param( message, notices, id=test_id, ) for test_id, message, notices in [ ( "single notice", dedent( """\ :bug:(parser): fix regex in emoji parser NOTICE: This is a notice """ ), ["This is a notice"], ), ( "multiline notice", dedent( """\ :bug:(parser): fix regex in 
emoji parser NOTICE: This is a notice that is longer than other notices """ ), ["This is a notice that is longer than other notices"], ), ( "multiple notices", dedent( """\ :bug:(parser): fix regex in emoji parser NOTICE: This is a notice NOTICE: This is a second notice """ ), ["This is a notice", "This is a second notice"], ), ( "notice with other footer", dedent( """\ :bug:(parser): fix regex in emoji parser BREAKING CHANGE: This is a breaking change NOTICE: This is a notice """ ), ["This is a notice"], ), ] ], ) def test_parser_return_release_notices_from_commit_message( default_emoji_parser: EmojiCommitParser, message: str, notices: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_emoji_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(notices) == result.release_notices # TODO: v10, remove this # full_description = str.join("\n\n", result.descriptions) # full_notice = str.join("\n\n", result.release_notices) # assert full_notice not in full_description @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) :bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.NO_RELEASE, "type": "Other", "descriptions": [ "Merged in feat/my-awesome-stuff (pull request #10)" ], "linked_merge_request": "#10", }, { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple 
lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) :bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author :sparkles: implemented searching gizmos by keyword :memo:(parser): add new parser pattern :boom::bug: changed option name A breaking change description Closes: #555 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.NO_RELEASE, "type": "Other", "descriptions": [ "Merged in feat/my-awesome-stuff (pull request #10)" ], "linked_merge_request": "#10", }, { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": ":sparkles:", "descriptions": [ ":sparkles: implemented searching gizmos by keyword" ], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": ":memo:", "scope": "parser", "descriptions": [ ":memo:(parser): add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": ":boom:", "scope": "", "descriptions": [ ":boom::bug: changed option name", "A breaking change description", "Closes: #555", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "invalid non-conventional formatted commit", ], "breaking_descriptions": [ "A breaking change description", "Closes: #555", # This is a bit unusual but its because there is no identifier that will # identify this as a 
separate commit so it gets included in the previous commit "invalid non-conventional formatted commit", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_bitbucket_squash_style( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_squash_commits": True, "parse_linked_issues": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 
:bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), } ], ), ( "Multiple commits squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 :bug:(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author commit 1f34769bf8352131ad6f4879b8c47becf3c7aa69 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 :sparkles: implemented searching gizmos by keyword commit b2334a64a11ef745a17a2a4034f651e08e8c45a6 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 :memo:(parser): add new parser pattern commit 5f0292fb5a88c3a46e4a02bec35b85f5228e8e51 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 :boom::bug: changed option name A breaking change description Closes: #555 commit 2f314e7924be161cfbf220d3b6e2a6189a3b5609 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ ":bug:(release-config): some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), }, { "bump": LevelBump.MINOR, "type": ":sparkles:", "descriptions": [ ":sparkles: implemented searching gizmos by keyword" ], }, { "bump": LevelBump.NO_RELEASE, "type": ":memo:", "scope": "parser", "descriptions": [ 
":memo:(parser): add new parser pattern", ], }, { "bump": LevelBump.MAJOR, "type": ":boom:", "descriptions": [ ":boom::bug: changed option name", "A breaking change description", "Closes: #555", ], "breaking_descriptions": [ "A breaking change description", "Closes: #555", ], "linked_issues": ("#555",), }, { "bump": LevelBump.NO_RELEASE, "type": "Other", "descriptions": ["invalid non-conventional formatted commit"], }, ], ), ] ], ) def test_parser_squashed_commit_git_squash_style( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_squash_commits": True, "parse_linked_issues": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, 
expected_commit_details in [ ( "Single commit squashed via GitHub PR resolution", dedent( """\ :bug:(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ # TODO: v10 removal of PR number from subject ":bug:(release-config): some commit subject (#10)", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via GitHub PR resolution", dedent( """\ :bug:(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author * :sparkles: implemented searching gizmos by keyword * :memo:(parser): add new parser pattern * :boom::bug: changed option name A breaking change description Closes: #555 * invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": ":bug:", "scope": "release-config", "descriptions": [ # TODO: v10 removal of PR number from subject ":bug:(release-config): some commit subject (#10)", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": ":sparkles:", "descriptions": [ ":sparkles: implemented searching gizmos by keyword" ], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": ":memo:", "scope": "parser", "descriptions": [ ":memo:(parser): add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": ":boom:", "scope": "", "descriptions": [ ":boom::bug: changed option name", "A breaking change description", "Closes: #555", 
# This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "* invalid non-conventional formatted commit", ], "breaking_descriptions": [ "A breaking change description", "Closes: #555", "* invalid non-conventional formatted commit", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_github_squash_style( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = EmojiCommitParser( options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "parse_squash_commits": True, "parse_linked_issues": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request def test_parser_ignore_merge_commit( default_emoji_parser: EmojiCommitParser, make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = EmojiCommitParser( 
options=EmojiParserOptions( **{ **default_emoji_parser.options.__dict__, "ignore_merge_commits": True, } ) ) base_commit = make_commit_obj("Merge branch 'fix/fix-feature' into 'main'") incomming_commit = make_commit_obj("feat: add a new feature") # Setup: Create a merge commit merge_commit = make_commit_obj("Merge branch 'feat/add-new-feature' into 'main'") merge_commit.parents = [base_commit, incomming_commit] # Action parsed_result = parser.parse(merge_commit) assert isinstance(parsed_result, ParseError) assert "Ignoring merge commit" in parsed_result.error python-semantic-release-9.21.0/tests/unit/semantic_release/commit_parser/test_parsed_commit.py000066400000000000000000000014321475670435200331070ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING from semantic_release.commit_parser import ParsedCommit from semantic_release.version.version import LevelBump if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn def test_parsed_commit_computed_properties(make_commit_obj: MakeCommitObjFn): message = "feat(parser): Add new parser pattern" commit = make_commit_obj(message) parsed_commit = ParsedCommit( bump=LevelBump.MINOR, type="feature", scope="parser", descriptions=["Add new parser pattern"], breaking_descriptions=[], commit=commit, ) assert message == parsed_commit.message assert commit.hexsha == parsed_commit.hexsha assert commit.hexsha[:7] == parsed_commit.short_hash python-semantic-release-9.21.0/tests/unit/semantic_release/commit_parser/test_scipy.py000066400000000000000000001255511475670435200314210ustar00rootroot00000000000000from __future__ import annotations from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING, Iterable, Sequence import pytest from semantic_release.commit_parser.scipy import ( ScipyCommitParser, ScipyParserOptions, tag_to_section, ) from semantic_release.commit_parser.token import ParsedCommit, ParseError from semantic_release.enums import 
LevelBump from tests.const import SUPPORTED_ISSUE_CLOSURE_PREFIXES if TYPE_CHECKING: from tests.conftest import MakeCommitObjFn unwordwrap = regexp(r"((? """ ), [ None, { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via BitBucket PR resolution", dedent( """\ Merged in feat/my-awesome-stuff (pull request #10) BUG(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author ENH: implemented searching gizmos by keyword DOC(parser): add new parser pattern MAINT(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 invalid non-conventional formatted commit """ ), [ None, { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "feature", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "fix", "scope": "cli", "descriptions": [ "changed option name", "BREAKING CHANGE: A breaking change description", "Closes: #555", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "invalid non-conventional formatted commit", ], "breaking_descriptions": 
[ "A breaking change description", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_bitbucket_squash_style( default_scipy_parser: ScipyCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 BUG(release-config): some commit subject An additional description Second paragraph with multiple lines that will be 
condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), } ], ), ( "Multiple commits squashed via manual Git squash merge", dedent( """\ Squashed commit of the following: commit 63ec09b9e844e616dcaa7bae35a0b66671b59fbb Author: author Date: Sun Jan 19 12:05:23 2025 +0000 BUG(release-config): some commit subject An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author commit 1f34769bf8352131ad6f4879b8c47becf3c7aa69 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 ENH: implemented searching gizmos by keyword commit b2334a64a11ef745a17a2a4034f651e08e8c45a6 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 DOC(parser): add new parser pattern commit 5f0292fb5a88c3a46e4a02bec35b85f5228e8e51 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 MAINT(cli): changed option name BREAKING CHANGE: A breaking change description Closes: #555 commit 2f314e7924be161cfbf220d3b6e2a6189a3b5609 Author: author Date: Sat Jan 18 10:13:53 2025 +0000 invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ "some commit subject", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), }, { "bump": LevelBump.MINOR, "type": "feature", "descriptions": ["implemented searching gizmos by keyword"], }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], }, { "bump": LevelBump.MAJOR, "type": "fix", "scope": "cli", "descriptions": [ "changed option name", "BREAKING CHANGE: A breaking change description", 
"Closes: #555", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), }, None, ], ), ] ], ) def test_parser_squashed_commit_git_squash_style( default_scipy_parser: ScipyCommitParser, make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "commit_message, expected_commit_details", [ pytest.param( commit_message, expected_commit_details, id=test_id, ) for test_id, commit_message, expected_commit_details in [ ( "Single commit squashed via GitHub PR resolution", dedent( """\ BUG(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": 
"release-config", "descriptions": [ "some commit subject (#10)", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, ], ), ( "Multiple commits squashed via GitHub PR resolution", dedent( """\ BUG(release-config): some commit subject (#10) An additional description Second paragraph with multiple lines that will be condensed Resolves: #12 Signed-off-by: author * ENH: implemented searching gizmos by keyword * DOC(parser): add new parser pattern * MAINT(cli)!: changed option name BREAKING CHANGE: A breaking change description Closes: #555 * invalid non-conventional formatted commit """ ), [ { "bump": LevelBump.PATCH, "type": "fix", "scope": "release-config", "descriptions": [ # TODO: v10 removal of PR number from subject "some commit subject (#10)", "An additional description", "Second paragraph with multiple lines that will be condensed", "Resolves: #12", "Signed-off-by: author ", ], "linked_issues": ("#12",), "linked_merge_request": "#10", }, { "bump": LevelBump.MINOR, "type": "feature", "descriptions": ["implemented searching gizmos by keyword"], "linked_merge_request": "#10", }, { "bump": LevelBump.NO_RELEASE, "type": "documentation", "scope": "parser", "descriptions": [ "add new parser pattern", ], "linked_merge_request": "#10", }, { "bump": LevelBump.MAJOR, "type": "fix", "scope": "cli", "descriptions": [ "changed option name", "BREAKING CHANGE: A breaking change description", "Closes: #555", # This is a bit unusual but its because there is no identifier that will # identify this as a separate commit so it gets included in the previous commit "* invalid non-conventional formatted commit", ], "breaking_descriptions": [ "A breaking change description", ], "linked_issues": ("#555",), "linked_merge_request": "#10", }, ], ), ] ], ) def test_parser_squashed_commit_github_squash_style( default_scipy_parser: ScipyCommitParser, 
make_commit_obj: MakeCommitObjFn, commit_message: str, expected_commit_details: Sequence[dict | None], ): # Setup: Enable squash commit parsing parser = ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "parse_squash_commits": True, } ) ) # Build the commit object and parse it the_commit = make_commit_obj(commit_message) parsed_results = parser.parse(the_commit) # Validate the results assert isinstance(parsed_results, Iterable) assert ( len(expected_commit_details) == len(parsed_results) ), f"Expected {len(expected_commit_details)} parsed results, but got {len(parsed_results)}" for result, expected in zip(parsed_results, expected_commit_details): if expected is None: assert isinstance(result, ParseError) continue assert isinstance(result, ParsedCommit) # Required assert expected["bump"] == result.bump assert expected["type"] == result.type # Optional assert expected.get("scope", "") == result.scope # TODO: v10 change to tuples assert expected.get("descriptions", []) == result.descriptions assert expected.get("breaking_descriptions", []) == result.breaking_descriptions assert expected.get("linked_issues", ()) == result.linked_issues assert expected.get("linked_merge_request", "") == result.linked_merge_request @pytest.mark.parametrize( "message, linked_issues", # TODO: in v10, we will remove the issue reference footers from the descriptions [ *[ # GitHub, Gitea, GitLab style ( f"ENH: add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: #555", ["#555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: #555", ["#555"]), # lowercase prefix (f"{footer_prefix.upper()}: #555", ["#555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: #555,#444", ["#444", "#555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: 
#555, #444", ["#444", "#555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: #555 , #444", ["#444", "#555"], ), # Comma separated (w/ extra space) (f"{footer_prefix}: #555 #444", ["#444", "#555"]), # Space separated ( f"{footer_prefix}: #555;#444", ["#444", "#555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: #555; #444", ["#444", "#555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: #555 ; #444", ["#444", "#555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: #555/#444", ["#444", "#555"], ), # slash separated (w/o space) ( f"{footer_prefix}: #555/ #444", ["#444", "#555"], ), # slash separated (w/ space) ( f"{footer_prefix}: #555 / #444", ["#444", "#555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: #555Ƽ", ["#444", "#555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: #555& #444", ["#444", "#555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: #555 & #444", ["#444", "#555"], ), # ampersand separated (w/ extra space) (f"{footer_prefix}: #555 and #444", ["#444", "#555"]), # and separated ( f"{footer_prefix}: #555, #444, and #333", ["#333", "#444", "#555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) (f"{footer_prefix}: #555\n{footer_prefix}: #444", ["#444", "#555"]), # More than 2 issues ( f"{footer_prefix}: #555, #444, #333", ["#333", "#444", "#555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: #555, #3333, #444", ["#444", "#555", "#3333"], ), # Single issue listed multiple times (f"{footer_prefix}: #555, #555", ["#555"]), # Multiple footers with the same issue (f"{footer_prefix}: #555\n{footer_prefix}: #555", ["#555"]), # Multiple issues via multiple inline git footers (f"{footer_prefix}: #555, {footer_prefix}: #444", ["#444", "#555"]), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: #555", "Signed-off-by: johndoe ", f"{footer_prefix}: #444", ], ), ["#444", "#555"], ), # 
----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} #666", []), (f"{footer_prefix} #666, #777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} #666, {footer_prefix} #777", []), # Invalid 'and' separation (f"{footer_prefix}: #666and#777", ["#666and#777"]), # Invalid prefix ("ref: #666", []), # body mentions an issue and has a different git footer ( "In #666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ # JIRA style ( f"ENH(parser): add magic parser\n\n{footer}", linked_issues, ) for footer_prefix in SUPPORTED_ISSUE_CLOSURE_PREFIXES for footer, linked_issues in [ # Single issue ( f"{footer_prefix.capitalize()}: ABC-555", ["ABC-555"], ), # Git Footer style (capitalized) (f"{footer_prefix.lower()}: ABC-555", ["ABC-555"]), # lowercase prefix (f"{footer_prefix.upper()}: ABC-555", ["ABC-555"]), # uppercase prefix # Mulitple issues (variant 1: list with one prefix, not supported by GitHub) ( f"{footer_prefix}: ABC-555,ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/o space) ( f"{footer_prefix}: ABC-555, ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ space) ( f"{footer_prefix}: ABC-555 , ABC-444", ["ABC-444", "ABC-555"], ), # Comma separated (w/ extra space) ( f"{footer_prefix}: ABC-555 ABC-444", ["ABC-444", "ABC-555"], ), # Space separated ( f"{footer_prefix}: ABC-555;ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/o space) ( f"{footer_prefix}: ABC-555; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ space) ( f"{footer_prefix}: ABC-555 ; ABC-444", ["ABC-444", "ABC-555"], ), # semicolon separated (w/ extra space) ( f"{footer_prefix}: ABC-555/ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/o space) ( f"{footer_prefix}: ABC-555/ ABC-444", ["ABC-444", "ABC-555"], ), # 
slash separated (w/ space) ( f"{footer_prefix}: ABC-555 / ABC-444", ["ABC-444", "ABC-555"], ), # slash separated (w/ extra space) ( f"{footer_prefix}: ABC-555&ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/o space) ( f"{footer_prefix}: ABC-555& ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ space) ( f"{footer_prefix}: ABC-555 & ABC-444", ["ABC-444", "ABC-555"], ), # ampersand separated (w/ extra space) ( f"{footer_prefix}: ABC-555 and ABC-444", ["ABC-444", "ABC-555"], ), # and separated ( f"{footer_prefix}: ABC-555, ABC-444, and ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # and separated # Mulitple issues (variant 2: multiple footers, supported by GitHub) ( f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-444", ["ABC-444", "ABC-555"], ), # More than 2 issues ( f"{footer_prefix}: ABC-555, ABC-444, ABC-333", ["ABC-333", "ABC-444", "ABC-555"], ), # More than 2 issues (force numerical sort) ( f"{footer_prefix}: ABC-555, ABC-3333, ABC-444", ["ABC-444", "ABC-555", "ABC-3333"], ), # Single issue listed multiple times (f"{footer_prefix}: ABC-555, ABC-555", ["ABC-555"]), # Multiple footers with the same issue (f"{footer_prefix}: ABC-555\n{footer_prefix}: ABC-555", ["ABC-555"]), # Multiple issues via multiple inline git footers ( f"{footer_prefix}: ABC-666, {footer_prefix}: ABC-777", ["ABC-666", "ABC-777"], ), # Multiple valid footers ( str.join( "\n", [ f"{footer_prefix}: ABC-555", "Signed-off-by: johndoe ", f"{footer_prefix}: ABC-444", ], ), ["ABC-444", "ABC-555"], ), # ----------------------------------------- Invalid Sets ----------------------------------------- # # Must have colon because it is a git footer, these will not return a linked issue (f"{footer_prefix} ABC-666", []), (f"{footer_prefix} ABC-666, ABC-777", []), # Invalid Multiple issues (although it is supported by GitHub, it is not supported by the parser) (f"{footer_prefix} ABC-666, {footer_prefix} ABC-777", []), # Invalid 'and' separation (f"{footer_prefix}: 
ABC-666andABC-777", ["ABC-666andABC-777"]), # Invalid prefix ("ref: ABC-666", []), # body mentions an issue and has a different git footer ( "In ABC-666, the devils in the details...\n\nSigned-off-by: johndoe ", [], ), ] ], *[ ( f"ENH(parser): add magic parser\n\n{footer}", linked_issues, ) for footer, linked_issues in [ # Multiple footers with the same issue but different prefixes ("Resolves: #555\nfix: #444", ["#444", "#555"]), # Whitespace padded footer (" Resolves: #555\n", ["#555"]), ] ], ( # Only grabs the issue reference when there is a GitHub PR reference in the subject "ENH(parser): add magic parser (#123)\n\nCloses: #555", ["#555"], ), # Does not grab an issue when there is only a GitHub PR reference in the subject ("ENH(parser): add magic parser (#123)", []), # Does not grab an issue when there is only a Bitbucket PR reference in the subject ("ENH(parser): add magic parser (pull request #123)", []), ], ) def test_parser_return_linked_issues_from_commit_message( default_scipy_parser: ScipyCommitParser, message: str, linked_issues: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_scipy_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(linked_issues) == result.linked_issues @pytest.mark.parametrize( "message, notices", [ pytest.param( message, notices, id=test_id, ) for test_id, message, notices in [ ( "single notice", dedent( """\ BUG(parser): fix regex in scipy parser NOTICE: This is a notice """ ), ["This is a notice"], ), ( "multiline notice", dedent( """\ BUG(parser): fix regex in scipy parser NOTICE: This is a notice that is longer than other notices """ ), ["This is a notice that is longer than other notices"], ), ( "multiple notices", dedent( """\ BUG(parser): fix regex in scipy parser NOTICE: This is a notice NOTICE: This is a second notice """ ), ["This is a notice", 
"This is a second notice"], ), ( "notice with other footer", dedent( """\ BUG(parser): fix regex in scipy parser BREAKING CHANGE: This is a breaking change NOTICE: This is a notice """ ), ["This is a notice"], ), ] ], ) def test_parser_return_release_notices_from_commit_message( default_scipy_parser: ScipyCommitParser, message: str, notices: Sequence[str], make_commit_obj: MakeCommitObjFn, ): parsed_results = default_scipy_parser.parse(make_commit_obj(message)) assert isinstance(parsed_results, Iterable) assert len(parsed_results) == 1 result = next(iter(parsed_results)) assert isinstance(result, ParsedCommit) assert tuple(notices) == result.release_notices # TODO: v10, remove this # full_description = str.join("\n\n", result.descriptions) # full_notice = str.join("\n\n", result.release_notices) # assert full_notice not in full_description def test_parser_ignore_merge_commit( default_scipy_parser: ScipyCommitParser, make_commit_obj: MakeCommitObjFn, ): # Setup: Enable parsing of linked issues parser = ScipyCommitParser( options=ScipyParserOptions( **{ **default_scipy_parser.options.__dict__, "ignore_merge_commits": True, } ) ) base_commit = make_commit_obj("Merge branch 'fix/fix-feature' into 'main'") incomming_commit = make_commit_obj("feat: add a new feature") # Setup: Create a merge commit merge_commit = make_commit_obj("Merge branch 'feat/add-new-feature' into 'main'") merge_commit.parents = [base_commit, incomming_commit] # Action parsed_result = parser.parse(merge_commit) assert isinstance(parsed_result, ParseError) assert "Ignoring merge commit" in parsed_result.error python-semantic-release-9.21.0/tests/unit/semantic_release/commit_parser/test_util.py000066400000000000000000000015041475670435200312360ustar00rootroot00000000000000import pytest from semantic_release.commit_parser.util import parse_paragraphs @pytest.mark.parametrize( "text, expected", [ ("", []), ("\n\n \n\n \n", []), # Unix (LF) - empty lines ("\r\n\r\n \r\n\r\n \n", []), # Windows (CRLF) - 
empty lines ("\n\nA\n\nB\n", ["A", "B"]), # Unix (LF) ("\r\n\r\nA\r\n\r\nB\n", ["A", "B"]), # Windows (CRLF) ( "Long\nexplanation\n\nfull of interesting\ndetails", ["Long explanation", "full of interesting details"], ), ( # Windows uses CRLF "Long\r\nexplanation\r\n\r\nfull of interesting\r\ndetails", ["Long explanation", "full of interesting details"], ), ], ) def test_parse_paragraphs(text, expected): assert parse_paragraphs(text) == expected python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/000077500000000000000000000000001475670435200247475ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/__init__.py000066400000000000000000000000001475670435200270460ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/test__base.py000066400000000000000000000034071475670435200274350ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.hvcs._base import HvcsBase from tests.const import EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER from tests.fixtures.git_repo import example_git_https_url, example_git_ssh_url if TYPE_CHECKING: from typing import Any, Callable class ArbitraryHvcs(HvcsBase): def remote_url(self, use_token: bool) -> str: return super().remote_url(use_token) def get_changelog_context_filters(self) -> tuple[Callable[..., Any], ...]: return super().get_changelog_context_filters() @pytest.mark.parametrize( "remote_url, repo_name", [ (lazy_fixture(example_git_ssh_url.__name__), EXAMPLE_REPO_NAME), (lazy_fixture(example_git_https_url.__name__), EXAMPLE_REPO_NAME), ("git@my.corp.custom.domain:very_serious/business.git", "business"), ], ) def test_get_repository_owner(remote_url, repo_name): client = ArbitraryHvcs(remote_url) assert client.repo_name == repo_name @pytest.mark.parametrize( "remote_url, owner", [ 
(lazy_fixture(example_git_ssh_url.__name__), EXAMPLE_REPO_OWNER), (lazy_fixture(example_git_https_url.__name__), EXAMPLE_REPO_OWNER), ("git@my.corp.custom.domain:very_serious/business.git", "very_serious"), ], ) def test_get_repository_name(remote_url, owner): client = ArbitraryHvcs(remote_url) assert client.owner == owner @pytest.mark.parametrize( "bad_url", [ "a" * 25, "https://a/b/c/d/.git", "https://github.com/wrong", "git@gitlab.com/somewhere", ], ) def test_hvcs_parse_error(bad_url: str): with pytest.raises(ValueError): ArbitraryHvcs(bad_url) python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/test_bitbucket.py000066400000000000000000000241131475670435200303350ustar00rootroot00000000000000from __future__ import annotations import os from unittest import mock import pytest from semantic_release.hvcs.bitbucket import Bitbucket from tests.const import EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER @pytest.fixture def default_bitbucket_client(): remote_url = ( f"git@{Bitbucket.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" ) return Bitbucket(remote_url=remote_url) @pytest.mark.parametrize( str.join( ", ", [ "patched_os_environ", "hvcs_domain", "hvcs_api_domain", "expected_hvcs_domain", "expected_api_url", "insecure", ], ), [ # No env vars as CI is handled by Bamboo or Jenkins (which require user defined defaults) # API paths are different in BitBucket Cloud (bitbucket.org) vs BitBucket Data Center ( # Default values (BitBucket Cloud) {}, None, None, f"https://{Bitbucket.DEFAULT_DOMAIN}", Bitbucket.DEFAULT_API_URL_CLOUD, False, ), ( # Explicitly set default values {}, Bitbucket.DEFAULT_DOMAIN, Bitbucket.DEFAULT_API_URL_CLOUD, f"https://{Bitbucket.DEFAULT_DOMAIN}", Bitbucket.DEFAULT_API_URL_CLOUD, False, ), ( # Explicitly set custom values with full api path {}, EXAMPLE_HVCS_DOMAIN, f"{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", False, ), ( # Explicitly 
defined api as subdomain # POSSIBLY WRONG ASSUMPTION of Api path for BitBucket Server {}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", False, ), ( # Custom domain for on premise BitBucket Server (derive api endpoint) {}, EXAMPLE_HVCS_DOMAIN, None, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", False, ), ( # Custom domain with path prefix {}, "special.custom.server/bitbucket", None, "https://special.custom.server/bitbucket", "https://special.custom.server/bitbucket/rest/api/1.0", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ( # Allow insecure http connections explicitly & imply insecure api domain {}, f"http://{EXAMPLE_HVCS_DOMAIN}", None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, f"{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ( # Infer insecure connection from user configuration & imply insecure api domain {}, EXAMPLE_HVCS_DOMAIN, None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/rest/api/1.0", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Bitbucket.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", f"https://{Bitbucket.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_bitbucket_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, hvcs_api_domain: str | None, expected_hvcs_domain: str, expected_api_url: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, 
patched_os_environ, clear=True): client = Bitbucket( remote_url=remote_url, hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, token=token, allow_insecure=insecure, ) assert expected_hvcs_domain == str(client.hvcs_domain) assert expected_api_url == str(client.api_url) assert token == client.token assert remote_url == client._remote_url @pytest.mark.parametrize( "hvcs_domain, hvcs_api_domain, insecure", [ # Bad base domain schemes (f"ftp://{EXAMPLE_HVCS_DOMAIN}", None, False), (f"ftp://{EXAMPLE_HVCS_DOMAIN}", None, True), # Unallowed insecure connections when base domain is insecure (f"http://{EXAMPLE_HVCS_DOMAIN}", None, False), # Bad API domain schemes (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", False), (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", True), # Unallowed insecure connections when api domain is insecure (None, f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def test_bitbucket_client_init_with_invalid_scheme( hvcs_domain: str | None, hvcs_api_domain: str | None, insecure: bool, ): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Bitbucket( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, allow_insecure=insecure, ) @pytest.mark.parametrize( "patched_os_environ, expected_owner, expected_name", [ ({}, None, None), ({"BITBUCKET_REPO_FULL_NAME": "path/to/repo/foo"}, "path/to/repo", "foo"), ], ) def test_bitbucket_get_repository_owner_and_name( default_bitbucket_client: Bitbucket, patched_os_environ: dict[str, str], expected_owner: str, expected_name: str, ): # expected results should be a tuple[namespace, repo_name] # when None, the default values are used which matches default_bitbucket_client's setup expected_result = ( expected_owner or EXAMPLE_REPO_OWNER, expected_name or EXAMPLE_REPO_NAME, ) with mock.patch.dict(os.environ, patched_os_environ, clear=True): # Execute in mocked environment result = 
default_bitbucket_client._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result def test_compare_url(default_bitbucket_client: Bitbucket): start_rev = "revA" end_rev = "revB" expected_url = ( "{server}/{owner}/{repo}/branches/compare/{from_rev}%0D{to_rev}".format( server=default_bitbucket_client.hvcs_domain.url, owner=default_bitbucket_client.owner, repo=default_bitbucket_client.repo_name, from_rev=start_rev, to_rev=end_rev, ) ) actual_url = default_bitbucket_client.compare_url( from_rev=start_rev, to_rev=end_rev ) assert expected_url == actual_url @pytest.mark.parametrize( "patched_os_environ, use_token, token, remote_url, expected_auth_url", [ ( {"BITBUCKET_USER": "foo"}, False, "", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, False, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, True, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"https://x-token-auth:aabbcc@{Bitbucket.DEFAULT_DOMAIN}/custom/example.git", ), ( {"BITBUCKET_USER": "foo"}, False, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", ), ( {"BITBUCKET_USER": "foo"}, True, "aabbcc", f"git@{Bitbucket.DEFAULT_DOMAIN}:custom/example.git", f"https://foo:aabbcc@{Bitbucket.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( default_bitbucket_client: Bitbucket, patched_os_environ: dict[str, str], use_token: bool, token: str, remote_url: str, expected_auth_url: str, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): default_bitbucket_client._remote_url = remote_url default_bitbucket_client.token = token assert expected_auth_url == default_bitbucket_client.remote_url( use_token=use_token ) def test_commit_hash_url(default_bitbucket_client: Bitbucket): sha = "244f7e11bcb1e1ce097db61594056bc2a32189a0" 
expected_url = "{server}/{owner}/{repo}/commits/{sha}".format( server=default_bitbucket_client.hvcs_domain, owner=default_bitbucket_client.owner, repo=default_bitbucket_client.repo_name, sha=sha, ) assert expected_url == default_bitbucket_client.commit_hash_url(sha) @pytest.mark.parametrize("pr_number", (666, "666", "#666")) def test_pull_request_url(default_bitbucket_client: Bitbucket, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/pull-requests/{pr_number}".format( server=default_bitbucket_client.hvcs_domain, owner=default_bitbucket_client.owner, repo=default_bitbucket_client.repo_name, pr_number=str(pr_number).lstrip("#"), ) actual_url = default_bitbucket_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/test_gitea.py000066400000000000000000000672711475670435200274660ustar00rootroot00000000000000from __future__ import annotations import fnmatch import glob import os import re from typing import TYPE_CHECKING from unittest import mock from urllib.parse import urlencode import pytest import requests_mock from requests import HTTPError, Response, Session from requests.auth import _basic_auth_str from semantic_release.hvcs.gitea import Gitea from semantic_release.hvcs.token_auth import TokenAuth from tests.const import ( EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, RELEASE_NOTES, ) from tests.fixtures.example_project import init_example_project if TYPE_CHECKING: from pathlib import Path from typing import Generator from tests.conftest import NetrcFileFn @pytest.fixture def default_gitea_client() -> Generator[Gitea, None, None]: remote_url = ( f"git@{Gitea.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" ) with mock.patch.dict(os.environ, {}, clear=True): yield Gitea(remote_url=remote_url) @pytest.mark.parametrize( str.join( ", ", [ "patched_os_environ", "hvcs_domain", "expected_hvcs_domain", "insecure", ], ), # NOTE: Gitea does not 
have a different api domain [ # Default values ({}, None, f"https://{Gitea.DEFAULT_DOMAIN}", False), ( # Gather domain from environment {"GITEA_SERVER_URL": "https://special.custom.server/"}, None, "https://special.custom.server", False, ), ( # Custom domain with path prefix (derives from environment) {"GITEA_SERVER_URL": "https://special.custom.server/vcs/"}, None, "https://special.custom.server/vcs", False, ), ( # Ignore environment & use provided parameter value (ie from user config) {"GITEA_SERVER_URL": "https://special.custom.server/"}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Gitea.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", f"https://{Gitea.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_gitea_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, expected_hvcs_domain: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): client = Gitea( remote_url=remote_url, hvcs_domain=hvcs_domain, token=token, allow_insecure=insecure, ) # Evaluate (expected -> actual) assert expected_hvcs_domain == client.hvcs_domain.url assert f"{expected_hvcs_domain}/api/v1" == str(client.api_url) assert token == client.token assert remote_url == client._remote_url assert hasattr(client, "session") assert isinstance(getattr(client, "session", None), Session) @pytest.mark.parametrize( "hvcs_domain, insecure", [ (f"ftp://{EXAMPLE_HVCS_DOMAIN}", False), (f"ftp://{EXAMPLE_HVCS_DOMAIN}", True), (f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def 
test_gitea_client_init_with_invalid_scheme(hvcs_domain: str, insecure: bool): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Gitea( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, allow_insecure=insecure, ) def test_gitea_get_repository_owner_and_name(default_gitea_client: Gitea): expected_result = (EXAMPLE_REPO_OWNER, EXAMPLE_REPO_NAME) # Execute method under test result = default_gitea_client._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result @pytest.mark.parametrize( "use_token, token, remote_url, expected_auth_url", [ ( False, "", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", ), ( False, "aabbcc", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "aabbcc", f"git@{Gitea.DEFAULT_DOMAIN}:custom/example.git", f"https://aabbcc@{Gitea.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( default_gitea_client: Gitea, use_token: bool, token: str, remote_url: str, expected_auth_url: str, ): default_gitea_client._remote_url = remote_url default_gitea_client.token = token assert expected_auth_url == default_gitea_client.remote_url(use_token=use_token) def test_commit_hash_url(default_gitea_client: Gitea): sha = "hashashash" expected_url = "{server}/{owner}/{repo}/commit/{sha}".format( server=default_gitea_client.hvcs_domain.url, owner=default_gitea_client.owner, repo=default_gitea_client.repo_name, sha=sha, ) assert expected_url == default_gitea_client.commit_hash_url(sha) @pytest.mark.parametrize("issue_number", (666, "666", "#666")) def test_issue_url(default_gitea_client: Gitea, issue_number: int | str): expected_url = "{server}/{owner}/{repo}/issues/{issue_number}".format( 
server=default_gitea_client.hvcs_domain.url, owner=default_gitea_client.owner, repo=default_gitea_client.repo_name, issue_number=str(issue_number).lstrip("#"), ) assert expected_url == default_gitea_client.issue_url(issue_num=issue_number) @pytest.mark.parametrize("pr_number", (666, "666", "#666")) def test_pull_request_url(default_gitea_client: Gitea, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/pulls/{pr_number}".format( server=default_gitea_client.hvcs_domain.url, owner=default_gitea_client.owner, repo=default_gitea_client.repo_name, pr_number=str(pr_number).lstrip("#"), ) actual_url = default_gitea_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url @pytest.mark.parametrize("release_id", (42, 666)) def test_asset_upload_url(default_gitea_client: Gitea, release_id: int): expected_url = "{server}/repos/{owner}/{repo}/releases/{release_id}/assets".format( server=default_gitea_client.api_url, owner=default_gitea_client.owner, repo=default_gitea_client.repo_name, release_id=release_id, ) actual_url = default_gitea_client.asset_upload_url(release_id=release_id) assert expected_url == actual_url ############ # Tests which need http response mocking ############ gitea_matcher = re.compile(rf"^https://{Gitea.DEFAULT_DOMAIN}") gitea_api_matcher = re.compile( rf"^https://{Gitea.DEFAULT_DOMAIN}{Gitea.DEFAULT_API_PATH}" ) @pytest.mark.parametrize("status_code", [201]) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_succeeds( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": 
RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "POST", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test actual_rtn_val = default_gitea_client.create_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == actual_rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 409)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_fails( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "POST", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exeception to be raised with pytest.raises(HTTPError): default_gitea_client.create_release(tag, RELEASE_NOTES, prerelease) # Evaluate (expected -> actual) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("token", (None, 
"super-token")) def test_should_create_release_using_token_or_netrc( default_gitea_client: Gitea, token: str | None, default_netrc_username: str, default_netrc_password: str, netrc_file: NetrcFileFn, clean_os_environment: dict[str, str], ): # Setup default_gitea_client.token = token default_gitea_client.session.auth = None if not token else TokenAuth(token) tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": False, } expected_request_headers = set( ( {"Authorization": f"token {token}"} if token else { "Authorization": _basic_auth_str( default_netrc_username, default_netrc_password ) } ).items() ) # create netrc file # NOTE: write netrc file with DEFAULT_DOMAIN not DEFAULT_API_DOMAIN as can't # handle /api/v1 in file netrc = netrc_file(machine=default_gitea_client.DEFAULT_DOMAIN) mocked_os_environ = {**clean_os_environment, "NETRC": netrc.name} # Monkeypatch to create the Mocked environment with requests_mock.Mocker( session=default_gitea_client.session ) as m, mock.patch.dict(os.environ, mocked_os_environ, clear=True): # mock the response m.register_uri( "POST", gitea_api_matcher, json={"id": expected_release_id}, status_code=201 ) # Execute method under test ret_val = default_gitea_client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == ret_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() # calculate the match between expected and actual headers # We are not looking for an exact match, just that 
the headers we must have exist shared_headers = expected_request_headers.intersection( set(m.last_request.headers.items()) ) assert expected_request_headers == shared_headers, str.join( os.linesep, [ "Actual headers are missing some of the expected headers", f"Matching: {shared_headers}", f"Missing: {expected_request_headers - shared_headers}", f"Extra: {set(m.last_request.headers.items()) - expected_request_headers}", ], ) def test_request_has_no_auth_header_if_no_token_or_netrc(): tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" with mock.patch.dict(os.environ, {}, clear=True): client = Gitea(remote_url=f"git@{Gitea.DEFAULT_DOMAIN}:something/somewhere.git") expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=client.api_url, owner=client.owner, repo_name=client.repo_name, ) with requests_mock.Mocker(session=client.session) as m: # mock the response m.register_uri("POST", gitea_api_matcher, json={"id": 1}, status_code=201) # Execute method under test ret_val = client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == ret_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert "Authorization" not in m.last_request.headers @pytest.mark.parametrize( "resp_payload, status_code, expected_result", [ ({"id": 420}, 200, 420), ({}, 404, None), ], ) def test_get_release_id_by_tag( default_gitea_client: Gitea, resp_payload: dict[str, int], status_code: int, expected_result: int | None, ): # Setup tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "GET" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/tags/{tag}".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, tag=tag, ) ) with 
requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "GET", gitea_api_matcher, json=resp_payload, status_code=status_code ) # Execute method under test rtn_val = default_gitea_client.get_release_id_by_tag(tag) # Evaluate (expected -> actual) assert expected_result == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url @pytest.mark.parametrize("status_code", [201]) @pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_succeeds( default_gitea_client: Gitea, status_code: int, mock_release_id: int, ): # Setup expected_num_requests = 1 expected_http_method = "PATCH" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "PATCH", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test rtn_val = default_gitea_client.edit_release_notes( mock_release_id, RELEASE_NOTES ) # Evaluate (expected -> actual) assert mock_release_id == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_fails( default_gitea_client: Gitea, status_code: int, mock_release_id: int, ): # Setup expected_num_requests = 1 expected_http_method = "PATCH" expected_request_url = ( 
"{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gitea_client.api_url, owner=default_gitea_client.owner, repo_name=default_gitea_client.repo_name, release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "PATCH", gitea_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exception to be raised with pytest.raises(HTTPError): default_gitea_client.edit_release_notes(mock_release_id, RELEASE_NOTES) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() # Note - mocking as the logic for the create/update of a release # is covered by testing above, no point re-testing. @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_succeeds( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" with mock.patch.object( default_gitea_client, default_gitea_client.create_release.__name__, return_value=mock_release_id, ) as mock_create_release, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gitea_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == result mock_create_release.assert_called_once_with(tag, RELEASE_NOTES, prerelease) mock_get_release_id_by_tag.assert_not_called() 
mock_edit_release_notes.assert_not_called() @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_succeeds( default_gitea_client: Gitea, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gitea_client, default_gitea_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gitea_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == result mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_called_once_with(mock_release_id, RELEASE_NOTES) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_no_release_for_tag( default_gitea_client: Gitea, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gitea_client, default_gitea_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.edit_release_notes.__name__, return_value=None, ) as mock_edit_release_notes: # Execute in mock environment expecting 
an exception to be raised with pytest.raises(ValueError): default_gitea_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_not_called() @pytest.mark.parametrize("status_code", (200, 201)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_succeeds( default_gitea_client: Gitea, example_changelog_md: Path, status_code: int, mock_release_id: int, ): # Setup urlparams = {"name": example_changelog_md.name} expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{url}?{params}".format( url=default_gitea_client.asset_upload_url(mock_release_id), params=urlencode(urlparams), ) expected_changelog = example_changelog_md.read_bytes() with requests_mock.Mocker(session=default_gitea_client.session) as m: m.register_uri( "POST", gitea_api_matcher, json={"status": "ok"}, status_code=status_code ) result = default_gitea_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label="doesn't matter could be None", ) # Evaluate (expected -> actual) assert result is True assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_changelog in m.last_request.body @pytest.mark.parametrize("status_code", (400, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_fails( default_gitea_client: Gitea, example_changelog_md: Path, status_code: int, mock_release_id: int, ): with requests_mock.Mocker(session=default_gitea_client.session) as m: # mock the response m.register_uri( "POST", gitea_api_matcher, json={"status": "error"}, status_code=status_code ) # Execute method under test 
expecting an exception to be raised with pytest.raises(HTTPError): default_gitea_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label="doesn't matter could be None", ) # Note - mocking as the logic for uploading an asset # is covered by testing above, no point re-testing. def test_upload_dists_when_release_id_not_found(default_gitea_client: Gitea): tag = "v1.0.0" path = "doesn't matter" expected_num_uploads = 0 # Set up mock environment with mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.upload_release_asset.__name__ ) as mock_upload_release_asset: # Execute method under test result = default_gitea_client.upload_dists(tag, path) # Evaluate assert expected_num_uploads == result mock_get_release_id_by_tag.assert_called_once_with(tag=tag) mock_upload_release_asset.assert_not_called() @pytest.mark.parametrize( "files, glob_pattern, upload_statuses, expected_num_uploads", [ (["foo.zip", "bar.whl"], "*.zip", [True], 1), (["foo.whl", "foo.egg", "foo.tar.gz"], "foo.*", [True, True, True], 3), # What if not built? ([], "*", [], 0), # What if wrong directory/other stuff in output dir/subfolder? 
(["specialconfig.yaml", "something.whl", "desc.md"], "*.yaml", [True], 1), (["specialconfig.yaml", "something.whl", "desc.md"], "*.md", [True], 1), ], ) def test_upload_dists_when_release_id_found( default_gitea_client: Gitea, files: list[str], glob_pattern: str, upload_statuses: list[bool], expected_num_uploads: int, ): release_id = 420 tag = "doesn't matter" matching_files = fnmatch.filter(files, glob_pattern) expected_files_uploaded = [mock.call(release_id, fn) for fn in matching_files] # Skip check as the files don't exist in filesystem mocked_isfile = mock.patch.object(os.path, "isfile", return_value=True) mocked_globber = mock.patch.object(glob, "glob", return_value=matching_files) # Set up mock environment with mocked_globber, mocked_isfile, mock.patch.object( default_gitea_client, default_gitea_client.get_release_id_by_tag.__name__, return_value=release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gitea_client, default_gitea_client.upload_release_asset.__name__, side_effect=upload_statuses, ) as mock_upload_release_asset: # Execute method under test num_uploads = default_gitea_client.upload_dists(tag, glob_pattern) # Evaluate (expected -> actual) assert expected_num_uploads == num_uploads mock_get_release_id_by_tag.assert_called_once_with(tag=tag) assert expected_files_uploaded == mock_upload_release_asset.call_args_list python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/test_github.py000066400000000000000000001075431475670435200276540ustar00rootroot00000000000000from __future__ import annotations import fnmatch import glob import os import re from typing import TYPE_CHECKING from unittest import mock from urllib.parse import urlencode import pytest import requests_mock from requests import HTTPError, Response, Session from requests.auth import _basic_auth_str from semantic_release.hvcs.github import Github from semantic_release.hvcs.token_auth import TokenAuth from tests.const import ( EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, 
EXAMPLE_REPO_OWNER, RELEASE_NOTES, ) from tests.fixtures.example_project import init_example_project if TYPE_CHECKING: from pathlib import Path from typing import Generator from tests.conftest import NetrcFileFn @pytest.fixture def default_gh_client() -> Generator[Github, None, None]: remote_url = ( f"git@{Github.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git" ) with mock.patch.dict(os.environ, {}, clear=True): yield Github(remote_url=remote_url) @pytest.mark.parametrize( str.join( ", ", [ "patched_os_environ", "hvcs_domain", "hvcs_api_domain", "expected_hvcs_domain", "expected_hvcs_api_url", "insecure", ], ), [ ( # Default values (GitHub Enterprise Cloud) {}, None, None, "https://github.com", "https://api.github.com", False, ), ( # Explicitly set default values (GitHub Enterprise Cloud) {}, Github.DEFAULT_DOMAIN, Github.DEFAULT_API_DOMAIN, "https://github.com", "https://api.github.com", False, ), ( # Pull both locations from environment (GitHub Actions on Cloud) { "GITHUB_SERVER_URL": f"https://{Github.DEFAULT_DOMAIN}", "GITHUB_API_URL": f"https://{Github.DEFAULT_API_DOMAIN}", }, None, None, "https://github.com", "https://api.github.com", False, ), ( # Explicitly set custom values with full api path {}, EXAMPLE_HVCS_DOMAIN, f"{EXAMPLE_HVCS_DOMAIN}{Github.DEFAULT_API_PATH_ONPREM}", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}{Github.DEFAULT_API_PATH_ONPREM}", False, ), ( # Explicitly defined api as subdomain # POSSIBLY WRONG ASSUMPTION of Api path for GitHub Enterprise Server (On Prem) {}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://api.{EXAMPLE_HVCS_DOMAIN}{Github.DEFAULT_API_PATH_ONPREM}", False, ), ( # Custom domain with path prefix {}, "special.custom.server/vcs", None, "https://special.custom.server/vcs", "https://special.custom.server/vcs/api/v3", False, ), ( # Gather domain from environment & imply api domain from server domain {"GITHUB_SERVER_URL": 
"https://special.custom.server/"}, None, None, "https://special.custom.server", "https://special.custom.server/api/v3", False, ), ( # Pull both locations from environment (On-prem Actions Env) { "GITHUB_SERVER_URL": "https://special.custom.server/", "GITHUB_API_URL": "https://special.custom.server/api/v3", }, None, None, "https://special.custom.server", "https://special.custom.server/api/v3", False, ), ( # Ignore environment & use provided parameter value (ie from user config) # then infer api domain from the parameter value based on default GitHub configurations {"GITHUB_SERVER_URL": "https://special.custom.server/vcs/"}, f"https://{EXAMPLE_HVCS_DOMAIN}", None, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", False, ), ( # Ignore environment & use provided parameter value (ie from user config) {"GITHUB_API_URL": "https://api.special.custom.server/"}, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}/api/v3", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ( # Allow insecure http connections explicitly & imply insecure api domain {}, f"http://{EXAMPLE_HVCS_DOMAIN}", None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, EXAMPLE_HVCS_DOMAIN, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ( # Infer insecure connection from user configuration & imply insecure api domain {}, EXAMPLE_HVCS_DOMAIN, None, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}/api/v3", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Github.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", 
f"https://{Github.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_github_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, hvcs_api_domain: str | None, expected_hvcs_domain: str, expected_hvcs_api_url: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): client = Github( remote_url=remote_url, hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, token=token, allow_insecure=insecure, ) # Evaluate (expected -> actual) assert expected_hvcs_domain == str(client.hvcs_domain) assert expected_hvcs_api_url == str(client.api_url) assert token == client.token assert remote_url == client._remote_url assert hasattr(client, "session") assert isinstance(getattr(client, "session", None), Session) @pytest.mark.parametrize( "hvcs_domain, hvcs_api_domain, insecure", [ # Bad base domain schemes (f"ftp://{EXAMPLE_HVCS_DOMAIN}", None, False), (f"ftp://{EXAMPLE_HVCS_DOMAIN}", None, True), # Unallowed insecure connections when base domain is insecure (f"http://{EXAMPLE_HVCS_DOMAIN}", None, False), # Bad API domain schemes (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", False), (None, f"ftp://api.{EXAMPLE_HVCS_DOMAIN}", True), # Unallowed insecure connections when api domain is insecure (None, f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def test_github_client_init_with_invalid_scheme( hvcs_domain: str | None, hvcs_api_domain: str | None, insecure: bool, ): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Github( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, hvcs_api_domain=hvcs_api_domain, allow_insecure=insecure, ) @pytest.mark.parametrize( "patched_os_environ, expected_owner, expected_name", [ ({}, None, None), ({"GITHUB_REPOSITORY": "path/to/repo/foo"}, "path/to/repo", "foo"), ], ) def 
test_github_get_repository_owner_and_name( default_gh_client: Github, patched_os_environ: dict[str, str], expected_owner: str, expected_name: str, ): # expected results should be a tuple[namespace, repo_name] # when None, the default values are used which matches default_gh_client's setup expected_result = ( expected_owner or EXAMPLE_REPO_OWNER, expected_name or EXAMPLE_REPO_NAME, ) with mock.patch.dict(os.environ, patched_os_environ, clear=True): # Execute in mocked environment result = default_gh_client._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result def test_compare_url(default_gh_client: Github): # Setup start_rev = "revA" end_rev = "revB" expected_url = "{server}/{owner}/{repo}/compare/{from_rev}...{to_rev}".format( server=default_gh_client.hvcs_domain, owner=default_gh_client.owner, repo=default_gh_client.repo_name, from_rev=start_rev, to_rev=end_rev, ) # Execute method under test actual_url = default_gh_client.compare_url(from_rev=start_rev, to_rev=end_rev) # Evaluate (expected -> actual) assert expected_url == actual_url @pytest.mark.parametrize( "patched_os_environ, use_token, token, remote_url, expected_auth_url", [ ( {"GITHUB_ACTOR": "foo"}, False, "", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {"GITHUB_ACTOR": "foo"}, True, "", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, False, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {}, True, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"https://aabbcc@{Github.DEFAULT_DOMAIN}/custom/example.git", ), ( {"GITHUB_ACTOR": "foo"}, False, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", ), ( {"GITHUB_ACTOR": "foo"}, True, "aabbcc", f"git@{Github.DEFAULT_DOMAIN}:custom/example.git", 
f"https://foo:aabbcc@{Github.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( default_gh_client: Github, patched_os_environ: dict[str, str], use_token: bool, token: str, remote_url: str, expected_auth_url: str, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): default_gh_client._remote_url = remote_url default_gh_client.token = token # Execute method under test & Evaluate (expected -> actual) assert expected_auth_url == default_gh_client.remote_url(use_token=use_token) def test_commit_hash_url(default_gh_client: Github): sha = "hashashash" expected_url = "{server}/{owner}/{repo}/commit/{sha}".format( server=default_gh_client.hvcs_domain.url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, sha=sha, ) assert expected_url == default_gh_client.commit_hash_url(sha) @pytest.mark.parametrize("issue_number", (666, "666", "#666")) def test_issue_url(default_gh_client: Github, issue_number: str | int): expected_url = "{server}/{owner}/{repo}/issues/{issue_num}".format( server=default_gh_client.hvcs_domain.url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, issue_num=str(issue_number).lstrip("#"), ) assert expected_url == default_gh_client.issue_url(issue_num=issue_number) @pytest.mark.parametrize("pr_number", (666, "666", "#666")) def test_pull_request_url(default_gh_client: Github, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/pull/{pr_number}".format( server=default_gh_client.hvcs_domain, owner=default_gh_client.owner, repo=default_gh_client.repo_name, pr_number=str(pr_number).lstrip("#"), ) actual_url = default_gh_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url ############ # Tests which need http response mocking ############ github_upload_url = f"https://uploads.{Github.DEFAULT_DOMAIN}" github_matcher = re.compile(rf"^https://{Github.DEFAULT_DOMAIN}") github_api_matcher = re.compile(rf"^https://{Github.DEFAULT_API_DOMAIN}") github_upload_matcher = 
re.compile(rf"^{github_upload_url}") @pytest.mark.parametrize("status_code", (200, 201)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_succeeds( default_gh_client: Github, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test actual_rtn_val = default_gh_client.create_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == actual_rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_release_fails( default_gh_client: Github, mock_release_id: int, prerelease: bool, status_code: int, ): tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": prerelease, } with 
requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exeception to be raised with pytest.raises(HTTPError): default_gh_client.create_release(tag, RELEASE_NOTES, prerelease) # Evaluate (expected -> actual) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("token", (None, "super-token")) def test_should_create_release_using_token_or_netrc( default_gh_client: Github, token: str | None, default_netrc_username: str, default_netrc_password: str, netrc_file: NetrcFileFn, clean_os_environment: dict[str, str], ): # Setup default_gh_client.token = token default_gh_client.session.auth = None if not token else TokenAuth(token) tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, ) expected_request_body = { "tag_name": tag, "name": tag, "body": RELEASE_NOTES, "draft": False, "prerelease": False, } expected_request_headers = set( ( {"Authorization": f"token {token}"} if token else { "Authorization": _basic_auth_str( default_netrc_username, default_netrc_password ) } ).items() ) # create netrc file netrc = netrc_file(machine=default_gh_client.DEFAULT_API_DOMAIN) mocked_os_environ = {**clean_os_environment, "NETRC": netrc.name} # Monkeypatch to create the Mocked environment with requests_mock.Mocker(session=default_gh_client.session) as m, mock.patch.dict( os.environ, mocked_os_environ, clear=True ): # mock the response m.register_uri( "POST", github_api_matcher, json={"id": 
expected_release_id}, status_code=201, ) # Execute method under test ret_val = default_gh_client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == ret_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() # calculate the match between expected and actual headers # We are not looking for an exact match, just that the headers we must have exist shared_headers = expected_request_headers.intersection( set(m.last_request.headers.items()) ) assert expected_request_headers == shared_headers, str.join( os.linesep, [ "Actual headers are missing some of the expected headers", f"Matching: {shared_headers}", f"Missing: {expected_request_headers - shared_headers}", f"Extra: {set(m.last_request.headers.items()) - expected_request_headers}", ], ) def test_request_has_no_auth_header_if_no_token_or_netrc(): tag = "v1.0.0" expected_release_id = 1 expected_num_requests = 1 expected_http_method = "POST" with mock.patch.dict(os.environ, {}, clear=True): client = Github( remote_url=f"git@{Github.DEFAULT_DOMAIN}:something/somewhere.git" ) expected_request_url = "{api_url}/repos/{owner}/{repo_name}/releases".format( api_url=client.api_url, owner=client.owner, repo_name=client.repo_name, ) with requests_mock.Mocker(session=client.session) as m: # mock the response m.register_uri("POST", github_api_matcher, json={"id": 1}, status_code=201) # Execute method under test rtn_val = client.create_release(tag, RELEASE_NOTES) # Evaluate (expected -> actual) assert expected_release_id == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert "Authorization" not in m.last_request.headers @pytest.mark.parametrize("status_code", [201]) 
@pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_succeeds( default_gh_client: Github, status_code: int, mock_release_id: int, ): # Setup expected_num_requests = 1 expected_http_method = "POST" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test rtn_val = default_gh_client.edit_release_notes(mock_release_id, RELEASE_NOTES) # Evaluate (expected -> actual) assert mock_release_id == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) def test_edit_release_notes_fails( default_gh_client: Github, status_code: int, mock_release_id: int ): # Setup expected_num_requests = 1 expected_http_method = "POST" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, release_id=mock_release_id, ) ) expected_request_body = {"body": RELEASE_NOTES} with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "POST", github_api_matcher, json={"id": mock_release_id}, status_code=status_code, ) # Execute method under test expecting an exception to be raised with pytest.raises(HTTPError): default_gh_client.edit_release_notes(mock_release_id, 
RELEASE_NOTES) # Evaluate (expected -> actual) assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url assert expected_request_body == m.last_request.json() @pytest.mark.parametrize( "resp_payload, status_code, expected_result", [ ({"id": 420, "status": "success"}, 200, 420), ({"error": "not found"}, 404, None), ({"error": "too many requests"}, 429, None), ({"error": "internal error"}, 500, None), ({"error": "temporarily unavailable"}, 503, None), ], ) def test_get_release_id_by_tag( default_gh_client: Github, resp_payload: dict[str, int], status_code: int, expected_result: int | None, ): # Setup tag = "v1.0.0" expected_num_requests = 1 expected_http_method = "GET" expected_request_url = ( "{api_url}/repos/{owner}/{repo_name}/releases/tags/{tag}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, tag=tag, ) ) with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri( "GET", github_api_matcher, json=resp_payload, status_code=status_code ) # Execute method under test rtn_val = default_gh_client.get_release_id_by_tag(tag) # Evaluate (expected -> actual) assert expected_result == rtn_val assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_request_url == m.last_request.url # Note - mocking as the logic for the create/update of a release # is covered by testing above, no point re-testing. 
@pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_succeeds( default_gh_client: Github, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" with mock.patch.object( default_gh_client, default_gh_client.create_release.__name__, return_value=mock_release_id, ) as mock_create_release, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gh_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert mock_release_id == result mock_create_release.assert_called_once_with(tag, RELEASE_NOTES, prerelease) mock_get_release_id_by_tag.assert_not_called() mock_edit_release_notes.assert_not_called() @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_succeeds( default_gh_client: Github, mock_release_id: int, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gh_client, default_gh_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=mock_release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.edit_release_notes.__name__, return_value=mock_release_id, ) as mock_edit_release_notes: # Execute in mock environment result = default_gh_client.create_or_update_release( tag, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert 
mock_release_id == result mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_called_once_with(mock_release_id, RELEASE_NOTES) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_no_release_for_tag( default_gh_client: Github, prerelease: bool, ): tag = "v1.0.0" not_found = HTTPError("404 Not Found") not_found.response = Response() not_found.response.status_code = 404 with mock.patch.object( default_gh_client, default_gh_client.create_release.__name__, side_effect=not_found, ) as mock_create_release, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.edit_release_notes.__name__, return_value=None, ) as mock_edit_release_notes: # Execute in mock environment expecting an exception to be raised with pytest.raises(ValueError): default_gh_client.create_or_update_release(tag, RELEASE_NOTES, prerelease) mock_create_release.assert_called_once() mock_get_release_id_by_tag.assert_called_once_with(tag) mock_edit_release_notes.assert_not_called() def test_asset_upload_url(default_gh_client: Github): release_id = 1 expected_num_requests = 1 expected_http_method = "GET" expected_asset_upload_request_url = ( "{api_url}/repos/{owner}/{repo}/releases/{release_id}".format( api_url=default_gh_client.api_url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, release_id=release_id, ) ) mocked_upload_url = ( "{upload_domain}/repos/{owner}/{repo}/releases/{release_id}/assets".format( upload_domain=github_upload_url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, release_id=release_id, ) ) # '{?name,label}' are added by github.com at least, maybe custom too # https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-a-release resp_payload = { "upload_url": 
mocked_upload_url + "{?name,label}", "status": "success", } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the response m.register_uri("GET", github_api_matcher, json=resp_payload, status_code=200) # Execute method under test result = default_gh_client.asset_upload_url(release_id) # Evaluate (expected -> actual) assert mocked_upload_url == result assert m.called assert expected_num_requests == len(m.request_history) assert expected_http_method == m.last_request.method assert expected_asset_upload_request_url == m.last_request.url @pytest.mark.parametrize("status_code", (200, 201)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_succeeds( default_gh_client: Github, example_changelog_md: Path, status_code: int, mock_release_id: int, ): # Setup label = "abc123" urlparams = {"name": example_changelog_md.name, "label": label} release_upload_url = ( "{upload_domain}/repos/{owner}/{repo}/releases/{release_id}/assets".format( upload_domain=github_upload_url, owner=default_gh_client.owner, repo=default_gh_client.repo_name, release_id=mock_release_id, ) ) expected_num_requests = 2 expected_retrieve_upload_url_method = "GET" expected_upload_http_method = "POST" expected_upload_url = "{url}?{params}".format( url=release_upload_url, params=urlencode(urlparams), ) expected_changelog = example_changelog_md.read_bytes() json_get_up_url = { "status": "ok", "upload_url": release_upload_url + "{?name,label}", } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the responses m.register_uri( "POST", github_upload_matcher, json={"status": "ok"}, status_code=status_code, ) m.register_uri( "GET", github_api_matcher, json=json_get_up_url, status_code=status_code ) # Execute method under test result = default_gh_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label=label, ) # Evaluate (expected -> actual) 
assert result is True assert m.called assert expected_num_requests == len(m.request_history) get_req, post_req = m.request_history assert expected_retrieve_upload_url_method == get_req.method assert expected_upload_http_method == post_req.method assert expected_upload_url == post_req.url assert expected_changelog == post_req.body @pytest.mark.parametrize("status_code", (400, 404, 429, 500, 503)) @pytest.mark.parametrize("mock_release_id", range(3)) @pytest.mark.usefixtures(init_example_project.__name__) def test_upload_release_asset_fails( default_gh_client: Github, example_changelog_md: Path, status_code: int, mock_release_id: int, ): # Setup label = "abc123" upload_url = "{up_url}/repos/{owner}/{repo_name}/releases/{release_id}".format( up_url=github_upload_url, owner=default_gh_client.owner, repo_name=default_gh_client.repo_name, release_id=mock_release_id, ) json_get_up_url = { "status": "ok", "upload_url": upload_url, } with requests_mock.Mocker(session=default_gh_client.session) as m: # mock the responses m.register_uri( "POST", github_upload_matcher, json={"message": "error"}, status_code=status_code, ) m.register_uri("GET", github_api_matcher, json=json_get_up_url, status_code=200) # Execute method under test expecting an exception to be raised with pytest.raises(HTTPError): default_gh_client.upload_release_asset( release_id=mock_release_id, file=example_changelog_md.resolve(), label=label, ) # Note - mocking as the logic for uploading an asset # is covered by testing above, no point re-testing. 
def test_upload_dists_when_release_id_not_found(default_gh_client): tag = "v1.0.0" path = "doesn't matter" expected_num_uploads = 0 # Set up mock environment with mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=None, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.upload_release_asset.__name__ ) as mock_upload_release_asset: # Execute method under test result = default_gh_client.upload_dists(tag, path) # Evaluate assert expected_num_uploads == result mock_get_release_id_by_tag.assert_called_once_with(tag=tag) mock_upload_release_asset.assert_not_called() @pytest.mark.parametrize( "files, glob_pattern, upload_statuses, expected_num_uploads", [ (["foo.zip", "bar.whl"], "*.zip", [True], 1), (["foo.whl", "foo.egg", "foo.tar.gz"], "foo.*", [True, True, True], 3), # What if not built? ([], "*", [], 0), # What if wrong directory/other stuff in output dir/subfolder? (["specialconfig.yaml", "something.whl", "desc.md"], "*.yaml", [True], 1), (["specialconfig.yaml", "something.whl", "desc.md"], "*.md", [True], 1), ], ) def test_upload_dists_when_release_id_found( default_gh_client: Github, files: list[str], glob_pattern: str, upload_statuses: list[bool], expected_num_uploads: int, ): release_id = 420 tag = "doesn't matter" matching_files = fnmatch.filter(files, glob_pattern) expected_files_uploaded = [mock.call(release_id, fn) for fn in matching_files] # Skip check as the files don't exist in filesystem mocked_isfile = mock.patch.object(os.path, "isfile", return_value=True) mocked_globber = mock.patch.object(glob, "glob", return_value=matching_files) # Set up mock environment with mocked_globber, mocked_isfile, mock.patch.object( default_gh_client, default_gh_client.get_release_id_by_tag.__name__, return_value=release_id, ) as mock_get_release_id_by_tag, mock.patch.object( default_gh_client, default_gh_client.upload_release_asset.__name__, side_effect=upload_statuses, ) as 
mock_upload_release_asset: # Execute method under test num_uploads = default_gh_client.upload_dists(tag, glob_pattern) # Evaluate (expected -> actual) assert expected_num_uploads == num_uploads mock_get_release_id_by_tag.assert_called_once_with(tag=tag) assert expected_files_uploaded == mock_upload_release_asset.call_args_list python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/test_gitlab.py000066400000000000000000000401701475670435200276240ustar00rootroot00000000000000from __future__ import annotations import os from typing import TYPE_CHECKING from unittest import mock import gitlab import gitlab.exceptions import gitlab.mixins import gitlab.v4.objects import pytest from semantic_release.hvcs.gitlab import Gitlab from tests.const import ( EXAMPLE_HVCS_DOMAIN, EXAMPLE_REPO_NAME, EXAMPLE_REPO_OWNER, RELEASE_NOTES, ) if TYPE_CHECKING: from typing import Generator # Note: there's nothing special about the value of these variables, # they're just constants for easier consistency with the faked objects A_GOOD_TAG = "v1.2.3" A_BAD_TAG = "v2.1.1-rc.1" A_LOCKED_TAG = "v0.9.0" A_MISSING_TAG = "v1.0.0+missing" # But note this is the only ref we're making a "fake" commit for, so # tests which need to query the remote for "a" ref, the exact sha for # which doesn't matter, all use this constant REF = "hashashash" @pytest.fixture def default_gl_project(example_git_https_url: str): return gitlab.Gitlab(url=example_git_https_url).projects.get( f"{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}", lazy=True ) @pytest.fixture def default_gl_client( example_git_https_url: str, default_gl_project: gitlab.v4.objects.Project, ) -> Generator[Gitlab, None, None]: gitlab_client = Gitlab(remote_url=example_git_https_url) # make sure that when project tries to get the project instance, we return the mock # that we control project_get_mock = mock.patch.object( gitlab_client._client.projects, gitlab_client._client.projects.get.__name__, return_value=default_gl_project, ) env_mock = 
mock.patch.dict(os.environ, {}, clear=True) with project_get_mock, env_mock: yield gitlab_client @pytest.mark.parametrize( "patched_os_environ, hvcs_domain, expected_hvcs_domain, insecure", # NOTE: GitLab does not have a different api domain [ # Default values ({}, None, f"https://{Gitlab.DEFAULT_DOMAIN}", False), ( # Gather domain from environment {"CI_SERVER_URL": "https://special.custom.server/"}, None, "https://special.custom.server", False, ), ( # Custom domain with path prefix (derives from environment) {"CI_SERVER_URL": "https://special.custom.server/vcs/"}, None, "https://special.custom.server/vcs", False, ), ( # Ignore environment & use provided parameter value (ie from user config) { "CI_SERVER_URL": "https://special.custom.server/", "CI_API_V4_URL": "https://special.custom.server/api/v3", }, f"https://{EXAMPLE_HVCS_DOMAIN}", f"https://{EXAMPLE_HVCS_DOMAIN}", False, ), ( # Allow insecure http connections explicitly {}, f"http://{EXAMPLE_HVCS_DOMAIN}", f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ( # Infer insecure connection from user configuration {}, EXAMPLE_HVCS_DOMAIN, f"http://{EXAMPLE_HVCS_DOMAIN}", True, ), ], ) @pytest.mark.parametrize( "remote_url", [ f"git@{Gitlab.DEFAULT_DOMAIN}:{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", f"https://{Gitlab.DEFAULT_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", ], ) @pytest.mark.parametrize("token", ("abc123", None)) def test_gitlab_client_init( patched_os_environ: dict[str, str], hvcs_domain: str | None, expected_hvcs_domain: str, remote_url: str, token: str | None, insecure: bool, ): with mock.patch.dict(os.environ, patched_os_environ, clear=True): client = Gitlab( remote_url=remote_url, hvcs_domain=hvcs_domain, token=token, allow_insecure=insecure, ) # Evaluate (expected -> actual) assert expected_hvcs_domain == client.hvcs_domain.url assert token == client.token assert remote_url == client._remote_url @pytest.mark.parametrize( "hvcs_domain, insecure", [ (f"ftp://{EXAMPLE_HVCS_DOMAIN}", False), 
(f"ftp://{EXAMPLE_HVCS_DOMAIN}", True), (f"http://{EXAMPLE_HVCS_DOMAIN}", False), ], ) def test_gitlab_client_init_with_invalid_scheme( hvcs_domain: str, insecure: bool, ): with pytest.raises(ValueError), mock.patch.dict(os.environ, {}, clear=True): Gitlab( remote_url=f"https://{EXAMPLE_HVCS_DOMAIN}/{EXAMPLE_REPO_OWNER}/{EXAMPLE_REPO_NAME}.git", hvcs_domain=hvcs_domain, allow_insecure=insecure, ) @pytest.mark.parametrize( "patched_os_environ, expected_owner, expected_name", [ ({}, None, None), ( {"CI_PROJECT_NAMESPACE": "path/to/repo", "CI_PROJECT_NAME": "foo"}, "path/to/repo", "foo", ), ], ) def test_gitlab_get_repository_owner_and_name( default_gl_client: Gitlab, example_git_https_url: str, patched_os_environ: dict[str, str], expected_owner: str | None, expected_name: str | None, ): # expected results should be a tuple[namespace, repo_name] and if both are None, # then the default value from GitLab class should be used expected_result = (expected_owner, expected_name) if expected_owner is None and expected_name is None: expected_result = super( Gitlab, default_gl_client )._get_repository_owner_and_name() with mock.patch.dict(os.environ, patched_os_environ, clear=True): # Execute in mocked environment result = Gitlab( remote_url=example_git_https_url, )._get_repository_owner_and_name() # Evaluate (expected -> actual) assert expected_result == result @pytest.mark.parametrize( "use_token, token, remote_url, expected_auth_url", [ ( False, "", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", ), ( False, "aabbcc", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", ), ( True, "aabbcc", f"git@{Gitlab.DEFAULT_DOMAIN}:custom/example.git", f"https://gitlab-ci-token:aabbcc@{Gitlab.DEFAULT_DOMAIN}/custom/example.git", ), ], ) def test_remote_url( use_token: 
bool, token: str, remote_url: str, expected_auth_url: str, ): with mock.patch.dict(os.environ, {}, clear=True): gl_client = Gitlab(remote_url=remote_url, token=token) assert expected_auth_url == gl_client.remote_url(use_token=use_token) def test_compare_url(default_gl_client: Gitlab): start_rev = "revA" end_rev = "revB" expected_url = "{server}/{owner}/{repo}/-/compare/{from_rev}...{to_rev}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, from_rev=start_rev, to_rev=end_rev, ) actual_url = default_gl_client.compare_url(from_rev=start_rev, to_rev=end_rev) assert expected_url == actual_url def test_commit_hash_url(default_gl_client: Gitlab): expected_url = "{server}/{owner}/{repo}/-/commit/{sha}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, sha=REF, ) assert expected_url == default_gl_client.commit_hash_url(REF) @pytest.mark.parametrize("issue_number", (666, "666", "#666")) def test_issue_url(default_gl_client: Gitlab, issue_number: int | str): expected_url = "{server}/{owner}/{repo}/-/issues/{issue_num}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, issue_num=str(issue_number).lstrip("#"), ) actual_url = default_gl_client.issue_url(issue_num=issue_number) assert expected_url == actual_url @pytest.mark.parametrize("pr_number", (666, "666", "!666")) def test_pull_request_url(default_gl_client: Gitlab, pr_number: int | str): expected_url = "{server}/{owner}/{repo}/-/merge_requests/{pr_number}".format( server=default_gl_client.hvcs_domain.url, owner=default_gl_client.owner, repo=default_gl_client.repo_name, pr_number=str(pr_number).lstrip("!"), ) actual_url = default_gl_client.pull_request_url(pr_number=pr_number) assert expected_url == actual_url @pytest.mark.parametrize("tag", (A_GOOD_TAG, A_LOCKED_TAG)) def test_create_release_succeeds( default_gl_client: Gitlab, 
default_gl_project: gitlab.v4.objects.Project, tag: str ): with mock.patch.object( default_gl_project.releases, default_gl_project.releases.create.__name__, ) as mocked_create_release: result = default_gl_client.create_release(tag, RELEASE_NOTES) assert tag == result mocked_create_release.assert_called_once_with( { "name": tag, "tag_name": tag, "tag_message": tag, "description": RELEASE_NOTES, } ) def test_create_release_fails_with_bad_tag( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): bad_request = gitlab.GitlabCreateError("401 Unauthorized") mock_failed_create = mock.patch.object( default_gl_project.releases, default_gl_project.releases.create.__name__, side_effect=bad_request, ) with mock_failed_create, pytest.raises(gitlab.GitlabCreateError): default_gl_client.create_release(A_BAD_TAG, RELEASE_NOTES) @pytest.mark.parametrize("tag", (A_GOOD_TAG, A_LOCKED_TAG)) def test_update_release_succeeds(default_gl_client: Gitlab, tag: str): fake_release_obj = gitlab.v4.objects.ProjectReleaseManager( default_gl_client._client ).get(tag, lazy=True) fake_release_obj._attrs["name"] = tag with mock.patch.object( gitlab.mixins.SaveMixin, gitlab.mixins.SaveMixin.save.__name__, ) as mocked_update_release: release_id = default_gl_client.edit_release_notes( fake_release_obj, RELEASE_NOTES ) assert tag == release_id mocked_update_release.assert_called_once() assert RELEASE_NOTES == fake_release_obj.description # noqa: SIM300 def test_update_release_fails_with_missing_tag( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): fake_release_obj = gitlab.v4.objects.ProjectRelease( default_gl_project.manager, {"id": A_MISSING_TAG, "name": A_MISSING_TAG}, lazy=True, ) mocked_update_release = mock.patch.object( gitlab.mixins.SaveMixin, gitlab.mixins.SaveMixin.save.__name__, side_effect=gitlab.GitlabUpdateError, ) with mocked_update_release, pytest.raises(gitlab.GitlabUpdateError): default_gl_client.edit_release_notes(fake_release_obj, 
RELEASE_NOTES) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_succeeds( default_gl_client: Gitlab, prerelease: bool ): with mock.patch.object( default_gl_client, default_gl_client.create_release.__name__, return_value=A_GOOD_TAG, ) as mock_create_release, mock.patch.object( default_gl_client, default_gl_client.edit_release_notes.__name__, return_value=A_GOOD_TAG, ) as mock_edit_release_notes: # Execute in mock environment result = default_gl_client.create_or_update_release( A_GOOD_TAG, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) assert A_GOOD_TAG == result # noqa: SIM300 mock_create_release.assert_called_once_with( tag=A_GOOD_TAG, release_notes=RELEASE_NOTES, prerelease=prerelease ) mock_edit_release_notes.assert_not_called() def test_get_release_id_by_tag( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): dummy_release = default_gl_project.releases.get(A_GOOD_TAG, lazy=True) with mock.patch.object( default_gl_project.releases, default_gl_project.releases.get.__name__, return_value=dummy_release, ) as mocked_get_release_id: result = default_gl_client.get_release_by_tag(A_GOOD_TAG) assert dummy_release == result mocked_get_release_id.assert_called_once_with(A_GOOD_TAG) def test_get_release_id_by_tag_fails( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): mocked_get_release_id = mock.patch.object( default_gl_project.releases, default_gl_project.releases.get.__name__, side_effect=gitlab.exceptions.GitlabAuthenticationError, ) with pytest.raises( gitlab.exceptions.GitlabAuthenticationError ), mocked_get_release_id: default_gl_client.get_release_by_tag(A_GOOD_TAG) def test_get_release_id_by_tag_not_found( default_gl_client: Gitlab, default_gl_project: gitlab.v4.objects.Project, ): mocked_get_release_id = mock.patch.object( default_gl_project.releases, default_gl_project.releases.get.__name__, side_effect=gitlab.exceptions.GitlabGetError, ) with 
mocked_get_release_id: result = default_gl_client.get_release_by_tag(A_GOOD_TAG) assert result is None @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_succeeds( default_gl_client: Gitlab, prerelease: bool, ): bad_request = gitlab.GitlabCreateError("400 Bad Request") expected_release_obj = gitlab.v4.objects.ProjectRelease( gitlab.v4.objects.ProjectManager(default_gl_client._client), {"commit": {"id": "1"}, "name": A_GOOD_TAG}, lazy=True, ) with mock.patch.object( default_gl_client, default_gl_client.create_release.__name__, side_effect=bad_request, ), mock.patch.object( default_gl_client, default_gl_client.get_release_by_tag.__name__, return_value=expected_release_obj, ), mock.patch.object( default_gl_client, default_gl_client.edit_release_notes.__name__, return_value=A_GOOD_TAG, ) as mock_edit_release_notes: # Execute in mock environment default_gl_client.create_or_update_release( A_GOOD_TAG, RELEASE_NOTES, prerelease ) # Evaluate (expected -> actual) mock_edit_release_notes.assert_called_once_with( release=expected_release_obj, release_notes=RELEASE_NOTES ) @pytest.mark.parametrize("prerelease", (True, False)) def test_create_or_update_release_when_create_fails_and_update_fails( default_gl_client: Gitlab, prerelease: bool, ): bad_request = gitlab.GitlabCreateError("400 Bad Request") not_found = gitlab.GitlabUpdateError("404 Not Found") fake_release_obj = gitlab.v4.objects.ProjectRelease( gitlab.v4.objects.ProjectManager(default_gl_client._client), {"commit": {"id": "1"}, "name": A_GOOD_TAG}, lazy=True, ) create_release_patch = mock.patch.object( default_gl_client, default_gl_client.create_release.__name__, side_effect=bad_request, ) edit_release_notes_patch = mock.patch.object( default_gl_client, default_gl_client.edit_release_notes.__name__, side_effect=not_found, ) get_release_by_id_patch = mock.patch.object( default_gl_client, default_gl_client.get_release_by_tag.__name__, 
return_value=fake_release_obj, ) # Execute in mocked environment expecting a GitlabUpdateError to be raised with create_release_patch, edit_release_notes_patch, get_release_by_id_patch: # noqa: SIM117 with pytest.raises(gitlab.GitlabUpdateError): default_gl_client.create_or_update_release( A_GOOD_TAG, RELEASE_NOTES, prerelease ) python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/test_token_auth.py000066400000000000000000000017061475670435200305250ustar00rootroot00000000000000import pytest from requests import Request from semantic_release.hvcs.token_auth import TokenAuth @pytest.fixture def example_request(): return Request( "GET", url="http://example.com", headers={ "User-Agent": "Python3", "Content-Type": "application/json", "Accept": "application/json", }, ) def test_token_eq(): t1 = TokenAuth("foo") t2 = TokenAuth("foo") assert t1 == t2 def test_token_neq(): t1 = TokenAuth("foo") t2 = TokenAuth("bar") assert t1 != t2 def test_call_token_auth_sets_headers(example_request): old_headers = example_request.headers.copy() old_headers.pop("Authorization", None) t1 = TokenAuth("foo") new_req = t1(example_request) auth_header = new_req.headers.pop("Authorization") assert auth_header == "token foo" assert new_req.headers == old_headers assert new_req.__dict__ == example_request.__dict__ python-semantic-release-9.21.0/tests/unit/semantic_release/hvcs/test_util.py000066400000000000000000000000001475670435200273230ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/test_helpers.py000066400000000000000000000233441475670435200270650ustar00rootroot00000000000000from typing import Iterable import pytest from semantic_release.helpers import ParsedGitUrl, parse_git_url, sort_numerically @pytest.mark.parametrize( "url, expected", [ ( "http://git.mycompany.com/username/myproject.git", ParsedGitUrl("http", "git.mycompany.com", "username", "myproject"), ), ( "http://subsubdomain.subdomain.company-net.com/username/myproject.git", 
ParsedGitUrl( "http", "subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "https://github.com/username/myproject.git", ParsedGitUrl("https", "github.com", "username", "myproject"), ), ( "https://gitlab.com/group/subgroup/myproject.git", ParsedGitUrl("https", "gitlab.com", "group/subgroup", "myproject"), ), ( "https://git.mycompany.com:4443/username/myproject.git", ParsedGitUrl("https", "git.mycompany.com:4443", "username", "myproject"), ), ( "https://subsubdomain.subdomain.company-net.com/username/myproject.git", ParsedGitUrl( "https", "subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "git://host.xz/path/to/repo.git/", ParsedGitUrl("git", "host.xz", "path/to", "repo"), ), ( "git://host.xz:9418/path/to/repo.git/", ParsedGitUrl("git", "host.xz:9418", "path/to", "repo"), ), ( "git@github.com:username/myproject.git", ParsedGitUrl("ssh", "git@github.com", "username", "myproject"), ), ( "git@subsubdomain.subdomain.company-net.com:username/myproject.git", ParsedGitUrl( "ssh", "git@subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "first.last_test-1@subsubdomain.subdomain.company-net.com:username/myproject.git", ParsedGitUrl( "ssh", "first.last_test-1@subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "ssh://git@github.com:3759/myproject.git", ParsedGitUrl("ssh", "git@github.com", "3759", "myproject"), ), ( "ssh://git@github.com:username/myproject.git", ParsedGitUrl("ssh", "git@github.com", "username", "myproject"), ), ( "ssh://git@bitbucket.org:7999/username/myproject.git", ParsedGitUrl("ssh", "git@bitbucket.org:7999", "username", "myproject"), ), ( "ssh://git@subsubdomain.subdomain.company-net.com:username/myproject.git", ParsedGitUrl( "ssh", "git@subsubdomain.subdomain.company-net.com", "username", "myproject", ), ), ( "git+ssh://git@github.com:username/myproject.git", ParsedGitUrl("ssh", "git@github.com", "username", "myproject"), ), ( 
"/Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "Users/username/dev/remote", "myproject"), ), ( "file:///Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "Users/username/dev/remote", "myproject"), ), ( "C:/Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "C:/Users/username/dev/remote", "myproject"), ), ( "file:///C:/Users/username/dev/remote/myproject.git", ParsedGitUrl("file", "", "C:/Users/username/dev/remote", "myproject"), ), ], ) def test_parse_valid_git_urls(url: str, expected: ParsedGitUrl): """Test that a valid given git remote url is parsed correctly.""" assert expected == parse_git_url(url) @pytest.mark.parametrize( "url", [ "icmp://git", "abcdefghijklmnop.git", "../relative/path/to/repo.git", "http://domain/project.git", ], ) def test_parse_invalid_git_urls(url: str): """Test that an invalid git remote url throws a ValueError.""" with pytest.raises(ValueError): parse_git_url(url) @pytest.mark.parametrize( "unsorted_list, sorted_list, reverse, allow_hex", [ pytest.param( unsorted_list, sorted_list, reverse, allow_hex, id=f"({i}) {test_id}", ) for i, (test_id, unsorted_list, sorted_list, reverse, allow_hex) in enumerate( [ ( "Only numbers (with mixed digits, ASC)", ["5", "3", "10"], ["3", "5", "10"], False, False, ), ( "Only numbers (with mixed digits, DESC)", ["5", "3", "10"], ["10", "5", "3"], True, False, ), ( "Only PR numbers (ASC)", ["#5", "#3", "#10"], ["#3", "#5", "#10"], False, False, ), ( "Only PR numbers (DESC)", ["#5", "#3", "#10"], ["#10", "#5", "#3"], True, False, ), ( "Multiple prefixes (ASC)", ["#5", "PR#3", "PR#10", "#100"], ["#5", "#100", "PR#3", "PR#10"], False, False, ), ( "Multiple prefixes (DESC)", ["#5", "PR#3", "PR#10", "#100"], ["#100", "#5", "PR#10", "PR#3"], True, False, ), ( "No numbers mixed with mulitple prefixes (ASC)", ["word", "#100", "#1000", "PR#45"], ["#100", "#1000", "PR#45", "word"], False, False, ), ( "No numbers mixed with mulitple prefixes (DESC)", ["word", 
"#100", "#1000", "PR#45"], ["#1000", "#100", "PR#45", "word"], True, False, ), ( "Commit hash links in RST link format (ASC)", [".. _8ab43ed:", ".. _7ffed34:", ".. _a3b4c54:"], [".. _7ffed34:", ".. _8ab43ed:", ".. _a3b4c54:"], False, True, ), ( "Commit hash links in RST link format (DESC)", [".. _8ab43ed:", ".. _7ffed34:", ".. _a3b4c54:"], [".. _a3b4c54:", ".. _8ab43ed:", ".. _7ffed34:"], True, True, ), ( "Mixed numbers, PR numbers, and commit hash links in RST link format (ASC)", [ ".. _#5:", ".. _8ab43ed:", ".. _PR#3:", ".. _#20:", ".. _7ffed34:", ".. _#100:", ".. _a3b4c54:", ], [ ".. _7ffed34:", ".. _8ab43ed:", ".. _a3b4c54:", ".. _#5:", ".. _#20:", ".. _#100:", ".. _PR#3:", ], False, True, ), ( "Mixed numbers, PR numbers, and commit hash links in RST link format (DESC)", [ ".. _#5:", ".. _8ab43ed:", ".. _PR#3:", ".. _#20:", ".. _7ffed34:", ".. _#100:", ".. _a3b4c54:", ], [ ".. _a3b4c54:", ".. _8ab43ed:", ".. _7ffed34:", ".. _#100:", ".. _#20:", ".. _#5:", ".. _PR#3:", ], True, True, ), ( # No change since the prefixes are always alphabetical, asc/desc only is b/w numbers "Same numbers with different prefixes (ASC)", ["PR#5", "#5"], ["#5", "PR#5"], False, False, ), ( "Same numbers with different prefixes (DESC)", ["#5", "PR#5"], ["#5", "PR#5"], True, False, ), ], start=1, ) ], ) def test_sort_numerically( unsorted_list: Iterable[str], sorted_list: Iterable[str], reverse: bool, allow_hex: bool, ): actual_list = sort_numerically( iterable=unsorted_list, reverse=reverse, allow_hex=allow_hex, ) assert sorted_list == actual_list 
python-semantic-release-9.21.0/tests/unit/semantic_release/version/000077500000000000000000000000001475670435200254715ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/version/__init__.py000066400000000000000000000000001475670435200275700ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/version/declarations/000077500000000000000000000000001475670435200301415ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/version/declarations/__init__.py000066400000000000000000000000001475670435200322400ustar00rootroot00000000000000test_pattern_declaration.py000066400000000000000000000377001475670435200355240ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/version/declarationsfrom __future__ import annotations from pathlib import Path from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.declarations.pattern import PatternVersionDeclaration from semantic_release.version.version import Version from tests.fixtures.git_repo import default_tag_format_str if TYPE_CHECKING: from re import Pattern def test_pattern_declaration_is_version_replacer(): """ Given the class PatternVersionDeclaration or an instance of it, When the class is evaluated as a subclass or an instance of, Then the evaluation is true """ assert issubclass(PatternVersionDeclaration, IVersionReplacer) pattern_instance = PatternVersionDeclaration( "file", r"^version = (?P.*)", VersionStampType.NUMBER_FORMAT ) assert isinstance(pattern_instance, IVersionReplacer) @pytest.mark.parametrize( str.join( ", ", [ "replacement_def", "tag_format", "starting_contents", 
"resulting_contents", "next_version", "test_file", ], ), [ pytest.param( replacement_def, tag_format, starting_contents, resulting_contents, next_version, test_file, id=test_id, ) for test_file in ["test_file"] for next_version in ["1.2.3"] for test_id, replacement_def, tag_format, starting_contents, resulting_contents in [ ( "Default number format for python string variable", f"{test_file}:__version__", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes """__version__ = '1.0.0'""", f"""__version__ = '{next_version}'""", ), ( "Explicit number format for python string variable", f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes """__version__ = '1.0.0'""", f"""__version__ = '{next_version}'""", ), ( "Using default tag format for python string variable", f"{test_file}:__version__:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes """__version__ = 'v1.0.0'""", f"""__version__ = 'v{next_version}'""", ), ( "Using custom tag format for python string variable", f"{test_file}:__version__:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses equals separator with double quotes '''__version__ = "module-v1.0.0"''', f'''__version__ = "module-v{next_version}"''', ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/1156 "Using default tag format for github actions uses-directive", f"{test_file}:repo/action-name:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses @ symbol separator without quotes or spaces """ uses: repo/action-name@v1.0.0""", f""" uses: repo/action-name@v{next_version}""", ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/1156 "Using custom tag format for github 
actions uses-directive", f"{test_file}:repo/action-name:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses @ symbol separator without quotes or spaces """ uses: repo/action-name@module-v1.0.0""", f""" uses: repo/action-name@module-v{next_version}""", ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/846 "Using default tag format for multi-line yaml", f"{test_file}:newTag:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses colon separator without quotes dedent( """\ # kustomization.yaml images: - name: repo/image newTag: v1.0.0 """ ), dedent( f"""\ # kustomization.yaml images: - name: repo/image newTag: v{next_version} """ ), ), ( # Based on https://github.com/python-semantic-release/python-semantic-release/issues/846 "Using custom tag format for multi-line yaml", f"{test_file}:newTag:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses colon separator without quotes dedent( """\ # kustomization.yaml images: - name: repo/image newTag: module-v1.0.0 """ ), dedent( f"""\ # kustomization.yaml images: - name: repo/image newTag: module-v{next_version} """ ), ), ( "Explicit number format for python walrus string variable", f"{test_file}:version:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses walrus separator with single quotes """if version := '1.0.0': """, f"""if version := '{next_version}': """, ), ( "Using default number format for multi-line & quoted json", f"{test_file}:version:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses colon separator with double quotes dedent( """\ { "version": "1.0.0" } """ ), dedent( f"""\ {{ "version": "{next_version}" }} """ ), ), ( "Using default tag format for multi-line & quoted json", f"{test_file}:version:{VersionStampType.TAG_FORMAT.value}", 
lazy_fixture(default_tag_format_str.__name__), # Uses colon separator with double quotes dedent( """\ { "version": "v1.0.0" } """ ), dedent( f"""\ {{ "version": "v{next_version}" }} """ ), ), ] ], ) def test_pattern_declaration_from_definition( replacement_def: str, tag_format: str, starting_contents: str, resulting_contents: str, next_version: str, test_file: str, change_to_ex_proj_dir: None, ): """ Given a file with a formatted version string, When update_file_w_version() is called with a new version, Then the file is updated with the new version string in the specified tag or number format Version variables can be separated by either "=", ":", "@", or ':=' with optional whitespace between operator and variable name. The variable name or values can also be wrapped in either single or double quotes. """ # Setup: create file with initial contents expected_filepath = Path(test_file).resolve() expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( replacement_def, tag_format, ) # Act: apply version change actual_file_modified = version_replacer.update_file_w_version( new_version=Version.parse(next_version, tag_format=tag_format), noop=False, ) # Evaluate actual_contents = Path(test_file).read_text() assert resulting_contents == actual_contents assert expected_filepath == actual_file_modified def test_pattern_declaration_no_file_change( default_tag_format_str: str, change_to_ex_proj_dir: None, ): """ Given a configured stamp file is already up-to-date, When update_file_w_version() is called with the same version, Then the file is not modified and no path is returned """ test_file = "test_file" expected_filepath = Path(test_file).resolve() next_version = Version.parse("1.2.3", tag_format=default_tag_format_str) starting_contents = f"""__version__ = '{next_version}'\n""" # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern 
Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=next_version, noop=False, ) # Evaluate actual_contents = expected_filepath.read_text() assert starting_contents == actual_contents assert file_modified is None def test_pattern_declaration_error_on_missing_file( default_tag_format_str: str, ): # Initialization should not fail or do anything intensive version_replacer = PatternVersionDeclaration.from_string_definition( "nonexistent_file:__version__", tag_format=default_tag_format_str, ) with pytest.raises(FileNotFoundError): version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=False, ) def test_pattern_declaration_no_version_in_file( default_tag_format_str: str, change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = """other content\n""" # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=False, ) # Evaluate actual_contents = expected_filepath.read_text() assert file_modified is None assert starting_contents == actual_contents def test_pattern_declaration_noop_is_noop( default_tag_format_str: str, change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = """__version__ = '1.0.0'\n""" # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern 
Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=True, ) # Evaluate actual_contents = Path(test_file).read_text() assert starting_contents == actual_contents assert expected_filepath == file_modified def test_pattern_declaration_noop_warning_on_missing_file( default_tag_format_str: str, capsys: pytest.CaptureFixture[str], ): version_replacer = PatternVersionDeclaration.from_string_definition( "nonexistent_file:__version__", tag_format=default_tag_format_str, ) file_to_modify = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=True, ) # Evaluate assert file_to_modify is None assert ( "FILE NOT FOUND: cannot stamp version in non-existent file" in capsys.readouterr().err ) def test_pattern_declaration_noop_warning_on_no_version_in_file( default_tag_format_str: str, capsys: pytest.CaptureFixture[str], change_to_ex_proj_dir: None, ): test_file = "test_file" starting_contents = """other content\n""" # Setup: create file with initial contents Path(test_file).write_text(starting_contents) # Create Pattern Replacer version_replacer = PatternVersionDeclaration.from_string_definition( f"{test_file}:__version__:{VersionStampType.NUMBER_FORMAT.value}", tag_format=default_tag_format_str, ) file_to_modify = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3", tag_format=default_tag_format_str), noop=True, ) # Evaluate assert file_to_modify is None assert ( "VERSION PATTERN NOT FOUND: no version to stamp in file" in capsys.readouterr().err ) @pytest.mark.parametrize( "search_text, error_msg", [ ( search_text, error_msg, ) for error_msg, search_text in [ *[ ("must use 'version' as a named group", s_text) for 
s_text in [ r"^version = (.*)$", r"^version = (?P.*)", r"(?P.*)", ] ], ("Invalid regular expression", r"*"), ] ], ) def test_bad_version_regex_fails(search_text: str, error_msg: Pattern[str] | str): with pytest.raises(ValueError, match=error_msg): PatternVersionDeclaration( "doesn't matter", search_text, VersionStampType.NUMBER_FORMAT ) @pytest.mark.parametrize( "replacement_def, error_msg", [ pytest.param( replacement_def, error_msg, id=str(error_msg), ) for replacement_def, error_msg in [ ( f"{Path(__file__)!s}", regexp(r"Invalid replacement definition .*, missing ':'"), ), ( f"{Path(__file__)!s}:__version__:not_a_valid_version_type", "Invalid stamp type, must be one of:", ), ] ], ) def test_pattern_declaration_w_invalid_definition( default_tag_format_str: str, replacement_def: str, error_msg: Pattern[str] | str, ): """ check if PatternVersionDeclaration raises ValueError when loaded from invalid strings given in the config file """ with pytest.raises(ValueError, match=error_msg): PatternVersionDeclaration.from_string_definition( replacement_def, default_tag_format_str, ) test_toml_declaration.py000066400000000000000000000245701475670435200350230ustar00rootroot00000000000000python-semantic-release-9.21.0/tests/unit/semantic_release/version/declarationsfrom __future__ import annotations from pathlib import Path from re import compile as regexp from textwrap import dedent from typing import TYPE_CHECKING import pytest from pytest_lazy_fixtures.lazy_fixture import lf as lazy_fixture from semantic_release.version.declarations.enum import VersionStampType from semantic_release.version.declarations.i_version_replacer import IVersionReplacer from semantic_release.version.declarations.toml import TomlVersionDeclaration from semantic_release.version.version import Version from tests.fixtures.git_repo import default_tag_format_str if TYPE_CHECKING: from re import Pattern def test_toml_declaration_is_version_replacer(): """ Given the class TomlVersionDeclaration or an 
instance of it, When the class is evaluated as a subclass or an instance of, Then the evaluation is true """ assert issubclass(TomlVersionDeclaration, IVersionReplacer) toml_instance = TomlVersionDeclaration( "file", "project.version", VersionStampType.NUMBER_FORMAT ) assert isinstance(toml_instance, IVersionReplacer) @pytest.mark.parametrize( str.join( ", ", [ "replacement_def", "tag_format", "starting_contents", "resulting_contents", "next_version", "test_file", ], ), [ pytest.param( replacement_def, tag_format, starting_contents, resulting_contents, next_version, test_file, id=test_id, ) for test_file in ["test_file.toml"] for next_version in ["1.2.3"] for test_id, replacement_def, tag_format, starting_contents, resulting_contents in [ ( "Default number format for project.version", f"{test_file}:project.version", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes dedent( """\ [project] version = '1.0.0' """ ), dedent( f"""\ [project] version = "{next_version}" """ ), ), ( "Explicit number format for project.version", f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", # irrelevant for this case lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with double quotes dedent( """\ [project] version = "1.0.0" """ ), dedent( f"""\ [project] version = "{next_version}" """ ), ), ( "Using default tag format for toml string variable", f"{test_file}:version:{VersionStampType.TAG_FORMAT.value}", lazy_fixture(default_tag_format_str.__name__), # Uses equals separator with single quotes '''version = "v1.0.0"''', f'''version = "v{next_version}"''', ), ( "Using custom tag format for toml string variable", f"{test_file}:version:{VersionStampType.TAG_FORMAT.value}", "module-v{version}", # Uses equals separator with double quotes '''version = "module-v1.0.0"''', f'''version = "module-v{next_version}"''', ), ] ], ) def test_toml_declaration_from_definition( replacement_def: str, 
tag_format: str, starting_contents: str, resulting_contents: str, next_version: str, test_file: str, change_to_ex_proj_dir: None, ): """ Given a file with a formatted version string, When update_file_w_version() is called with a new version, Then the file is updated with the new version string in the specified tag or number format Version variables can be separated by either "=", ":", "@", or ':=' with optional whitespace between operator and variable name. The variable name or values can also be wrapped in either single or double quotes. """ # Setup: create file with initial contents expected_filepath = Path(test_file).resolve() expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition(replacement_def) # Act: apply version change actual_file_modified = version_replacer.update_file_w_version( new_version=Version.parse(next_version, tag_format=tag_format), noop=False, ) # Evaluate actual_contents = Path(test_file).read_text() assert resulting_contents == actual_contents assert expected_filepath == actual_file_modified def test_toml_declaration_no_file_change( change_to_ex_proj_dir: None, ): """ Given a configured stamp file is already up-to-date, When update_file_w_version() is called with the same version, Then the file is not modified and no path is returned """ test_file = "test_file" next_version = Version.parse("1.2.3") starting_contents = dedent( f"""\ [project] version = "{next_version}" """ ) # Setup: create file with initial contents Path(test_file).write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=next_version, noop=False, ) # Evaluate actual_contents = Path(test_file).read_text() assert starting_contents == actual_contents assert 
file_modified is None def test_toml_declaration_error_on_missing_file(): # Initialization should not fail or do anything intensive version_replacer = TomlVersionDeclaration.from_string_definition( "nonexistent_file:version", ) with pytest.raises(FileNotFoundError): version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=False, ) def test_toml_declaration_no_version_in_file( change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = dedent( """\ [project] name = "example" """ ) # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=False, ) # Evaluate actual_contents = expected_filepath.read_text() assert file_modified is None assert starting_contents == actual_contents def test_toml_declaration_noop_is_noop( change_to_ex_proj_dir: None, ): test_file = "test_file" expected_filepath = Path(test_file).resolve() starting_contents = dedent( """\ [project] version = '1.0.0' """ ) # Setup: create file with initial contents expected_filepath.write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) # Act: apply version change file_modified = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=True, ) # Evaluate actual_contents = Path(test_file).read_text() assert starting_contents == actual_contents assert expected_filepath == file_modified def test_toml_declaration_noop_warning_on_missing_file( capsys: pytest.CaptureFixture[str], ): version_replacer = TomlVersionDeclaration.from_string_definition( "nonexistent_file:version", ) 
file_to_modify = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=True, ) # Evaluate assert file_to_modify is None assert ( "FILE NOT FOUND: cannot stamp version in non-existent file" in capsys.readouterr().err ) def test_toml_declaration_noop_warning_on_no_version_in_file( capsys: pytest.CaptureFixture[str], change_to_ex_proj_dir: None, ): test_file = "test_file" starting_contents = dedent( """\ [project] name = "example" """ ) # Setup: create file with initial contents Path(test_file).write_text(starting_contents) # Create Pattern Replacer version_replacer = TomlVersionDeclaration.from_string_definition( f"{test_file}:project.version:{VersionStampType.NUMBER_FORMAT.value}", ) file_to_modify = version_replacer.update_file_w_version( new_version=Version.parse("1.2.3"), noop=True, ) # Evaluate assert file_to_modify is None assert ( "VERSION PATTERN NOT FOUND: no version to stamp in file" in capsys.readouterr().err ) @pytest.mark.parametrize( "replacement_def, error_msg", [ pytest.param( replacement_def, error_msg, id=str(error_msg), ) for replacement_def, error_msg in [ ( f"{Path(__file__)!s}", regexp(r"Invalid TOML replacement definition .*, missing ':'"), ), ( f"{Path(__file__)!s}:tool.poetry.version:not_a_valid_version_type", "Invalid stamp type, must be one of:", ), ] ], ) def test_toml_declaration_w_invalid_definition( replacement_def: str, error_msg: Pattern[str] | str, ): """ check if TomlVersionDeclaration raises ValueError when loaded from invalid strings given in the config file """ with pytest.raises(ValueError, match=error_msg): TomlVersionDeclaration.from_string_definition(replacement_def) python-semantic-release-9.21.0/tests/unit/semantic_release/version/test_algorithm.py000066400000000000000000000247361475670435200311040ustar00rootroot00000000000000from __future__ import annotations from typing import TYPE_CHECKING from unittest import mock import pytest from git import Commit, Repo, TagReference from 
semantic_release.enums import LevelBump from semantic_release.version.algorithm import ( _increment_version, _traverse_graph_for_commits, tags_and_versions, ) from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version from tests.fixtures.repos import repo_w_initial_commit if TYPE_CHECKING: from typing import Sequence @pytest.mark.usefixtures(repo_w_initial_commit.__name__) def test_traverse_graph_for_commits(): # Setup fake git graph """ * merge commit 6 (start) [3636363] |\ | * commit 5 [3535353] | * commit 4 [3434343] |/ * commit 3 [3333333] * commit 2 [3232323] * commit 1 [3131313] * v1.0.0 [3030303] """ repo = Repo() v1_commit = Commit(repo, binsha=b"0" * 20, parents=[]) class TagReferenceOverride(TagReference): commit = v1_commit # mocking the commit property v1_tag = TagReferenceOverride(repo, "refs/tags/v1.0.0", check_path=False) trunk = Commit( repo, binsha=b"3" * 20, parents=[ Commit( repo, binsha=b"2" * 20, parents=[ Commit(repo, binsha=b"1" * 20, parents=[v1_commit]), ], ), ], ) start_commit = Commit( repo, binsha=b"6" * 20, parents=[ trunk, Commit( repo, binsha=b"5" * 20, parents=[ Commit(repo, binsha=b"4" * 20, parents=[trunk]), ], ), ], ) commit_1 = trunk.parents[0].parents[0] commit_2 = trunk.parents[0] commit_3 = trunk commit_4 = start_commit.parents[1].parents[0] commit_5 = start_commit.parents[1] commit_6 = start_commit expected_commit_order = [ commit_6.hexsha, commit_5.hexsha, commit_4.hexsha, commit_3.hexsha, commit_2.hexsha, commit_1.hexsha, ] # Execute with mock.patch.object( repo, repo.iter_commits.__name__, return_value=iter([v1_commit]) ): actual_commit_order = [ commit.hexsha for commit in _traverse_graph_for_commits( head_commit=start_commit, latest_release_tag_str=v1_tag.name, ) ] # Verify assert expected_commit_order == actual_commit_order @pytest.mark.parametrize( "tags, sorted_tags", [ ( ["v1.0.0", "v1.1.0", "v1.1.1"], ["v1.1.1", "v1.1.0", "v1.0.0"], ), ( ["v1.1.0", 
"v1.0.0", "v1.1.1"], ["v1.1.1", "v1.1.0", "v1.0.0"], ), ( ["v1.1.1", "v1.1.0", "v1.0.0"], ["v1.1.1", "v1.1.0", "v1.0.0"], ), # Examples from https://semver.org/#spec-item-11 (or inspired, where not all # version structures are supported) ( ["v1.0.0", "v2.0.0", "v2.1.1", "v2.1.0"], ["v2.1.1", "v2.1.0", "v2.0.0", "v1.0.0"], ), ( [ "v1.0.0-rc.1", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], [ "v1.0.0", "v1.0.0-rc.1", "v1.0.0-beta.11", "v1.0.0-beta.2", "v1.0.0-alpha.beta.1", "v1.0.0-alpha.1", ], ), ], ) def test_sorted_repo_tags_and_versions(tags: list[str], sorted_tags: list[str]): repo = Repo() translator = VersionTranslator() tagrefs = [repo.tag(tag) for tag in tags] actual = [t.name for t, _ in tags_and_versions(tagrefs, translator)] assert sorted_tags == actual @pytest.mark.parametrize( "tag_format, invalid_tags, valid_tags", [ ( "v{version}", ("test-v1.1.0", "v1.1.0-test-test"), [ "v1.0.0-rc.1", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], ), ( "v{version}", ("0.3", "0.4"), [ "v1.0.0-rc.1", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], ), ( r"(\w+--)?v{version}", ("v1.1.0-test-test", "test_v1.1.0"), [ "v1.0.0-rc.1", "test--v1.1.0", "v1.0.0-beta.2", "v1.0.0-beta.11", "v1.0.0-alpha.1", "v1.0.0-alpha.beta.1", "v1.0.0", ], ), ( r"(?Pfeature|fix)/v{version}--(?Pdev|stg|prod)", ("v1.1.0--test", "test_v1.1.0", "docs/v1.2.0--dev"), [ "feature/v1.0.0-rc.1--dev", "fix/v1.1.0--stg", "feature/v1.0.0-beta.2--stg", "fix/v1.0.0-beta.11--dev", "fix/v1.0.0-alpha.1--dev", "feature/v1.0.0-alpha.beta.1--dev", "feature/v1.0.0--prod", ], ), ], ) def test_tags_and_versions_ignores_invalid_tags_as_versions( tag_format: str, invalid_tags: Sequence[str], valid_tags: Sequence[str], ): repo = Repo() translator = VersionTranslator(tag_format=tag_format) tagrefs = [repo.tag(tag) for tag in (*valid_tags, *invalid_tags)] actual = [t.name for t, _ in 
tags_and_versions(tagrefs, translator)] assert set(valid_tags) == set(actual) @pytest.mark.parametrize( str.join( ", ", [ "latest_version", "latest_full_version", "level_bump", "prerelease", "prerelease_token", "expected_version", ], ), [ # NOTE: level_bump != LevelBump.NO_RELEASE, we return early in the # algorithm to discount this case # NOTE: you can only perform a PRERELEASE_REVISION bump on a previously # prerelease version and if you are requesting a prerelease ( "1.0.1-rc.1", "1.0.0", LevelBump.PRERELEASE_REVISION, True, "rc", "1.0.1-rc.2", ), *[ ( "1.0.0", "1.0.0", bump_level, prerelease, "rc", expected_version, ) for bump_level, prerelease, expected_version in [ (LevelBump.PATCH, False, "1.0.1"), (LevelBump.PATCH, True, "1.0.1-rc.1"), (LevelBump.MINOR, False, "1.1.0"), (LevelBump.MINOR, True, "1.1.0-rc.1"), (LevelBump.MAJOR, False, "2.0.0"), (LevelBump.MAJOR, True, "2.0.0-rc.1"), ] ], ( "1.2.4-rc.1", "1.2.3", LevelBump.PRERELEASE_REVISION, True, "rc", "1.2.4-rc.2", ), *[ ( "1.2.4-rc.1", "1.2.3", bump_level, prerelease, "rc", expected_version, ) for bump_level, prerelease, expected_version in [ (LevelBump.PATCH, False, "1.2.4"), (LevelBump.PATCH, True, "1.2.4-rc.2"), (LevelBump.MINOR, False, "1.3.0"), (LevelBump.MINOR, True, "1.3.0-rc.1"), (LevelBump.MAJOR, False, "2.0.0"), (LevelBump.MAJOR, True, "2.0.0-rc.1"), ] ], ( "2.0.0-rc.1", "1.19.3", LevelBump.PRERELEASE_REVISION, True, "rc", "2.0.0-rc.2", ), *[ ( "2.0.0-rc.1", "1.22.0", bump_level, prerelease, "rc", expected_version, ) for bump_level, prerelease, expected_version in [ (LevelBump.PATCH, False, "2.0.0"), (LevelBump.PATCH, True, "2.0.0-rc.2"), (LevelBump.MINOR, False, "2.0.0"), (LevelBump.MINOR, True, "2.0.0-rc.2"), (LevelBump.MAJOR, False, "2.0.0"), (LevelBump.MAJOR, True, "2.0.0-rc.2"), ] ], ], ) def test_increment_version_no_major_on_zero( latest_version: str, latest_full_version: str, level_bump: LevelBump, prerelease: bool, prerelease_token: str, expected_version: str, ): actual = 
_increment_version( latest_version=Version.parse(latest_version), latest_full_version=Version.parse(latest_full_version), level_bump=level_bump, prerelease=prerelease, prerelease_token=prerelease_token, major_on_zero=False, allow_zero_version=True, ) assert expected_version == str(actual) @pytest.mark.parametrize( "latest_version, latest_full_version, level_bump, prerelease, prerelease_token", [ # NOTE: level_bump != LevelBump.NO_RELEASE, we return early in the # algorithm to discount this case # NOTE: you can only perform a PRERELEASE_REVISION bump on a previously # prerelease version and if you are requesting a prerelease ( "1.0.0", "1.0.0", LevelBump.PRERELEASE_REVISION, False, "rc", ), ( "1.0.0", "1.0.0", LevelBump.PRERELEASE_REVISION, True, "rc", ), ], ) def test_increment_version_invalid_operation( latest_version: str, latest_full_version: str, level_bump: LevelBump, prerelease: bool, prerelease_token: str, ): with pytest.raises(ValueError): _increment_version( latest_version=Version.parse(latest_version), latest_full_version=Version.parse(latest_full_version), level_bump=level_bump, prerelease=prerelease, prerelease_token=prerelease_token, major_on_zero=False, allow_zero_version=True, ) python-semantic-release-9.21.0/tests/unit/semantic_release/version/test_translator.py000066400000000000000000000056011475670435200312750ustar00rootroot00000000000000import pytest from semantic_release.const import SEMVER_REGEX from semantic_release.version.translator import VersionTranslator from semantic_release.version.version import Version from tests.const import ( A_FULL_VERSION_STRING, A_FULL_VERSION_STRING_WITH_BUILD_METADATA, A_PRERELEASE_VERSION_STRING, ) @pytest.fixture def a_full_version() -> Version: return Version.parse(A_FULL_VERSION_STRING) @pytest.fixture def a_prerelease_version() -> Version: return Version.parse(A_PRERELEASE_VERSION_STRING) @pytest.fixture def a_full_version_with_build_metadata() -> Version: return 
Version.parse(A_FULL_VERSION_STRING_WITH_BUILD_METADATA) @pytest.mark.parametrize( "version_string", [ A_FULL_VERSION_STRING, A_PRERELEASE_VERSION_STRING, A_FULL_VERSION_STRING_WITH_BUILD_METADATA, "3.2.3-alpha.dev3+local.12345", # Pretty much as complex an example as there is ], ) def test_succeeds_semver_regex_match(version_string: str): assert SEMVER_REGEX.fullmatch( version_string ), "a valid semantic version was not matched" @pytest.mark.parametrize( "invalid_version_str", ["v1.2.3", "2.1", "3.1.1..3", "4.1.1.dev3"], # PEP440 version ) def test_invalid_semver_not_matched(invalid_version_str: str): assert SEMVER_REGEX.fullmatch(invalid_version_str) is None @pytest.mark.parametrize("fmt", ["version", "{versioN}", "v{major}.{minor}.{patch}"]) def test_invalid_tag_format(fmt: str): with pytest.raises(ValueError) as err: VersionTranslator(tag_format=fmt) assert all(("tag_format" in str(err), "version" in str(err), fmt in str(err))) @pytest.mark.parametrize( "version_string", [ A_FULL_VERSION_STRING, A_PRERELEASE_VERSION_STRING, A_FULL_VERSION_STRING_WITH_BUILD_METADATA, ], ) @pytest.mark.parametrize( "tag_format, prerelease_token", [ ("v{version}", "dev"), ("v{version}", "alpha"), ("special-tagging-scheme-{version}", "rc"), ], ) def test_translator_converts_versions_with_default_formatting_rules( version_string: str, tag_format: str, prerelease_token: str ): translator = VersionTranslator( tag_format=tag_format, prerelease_token=prerelease_token ) expected_version_obj = Version.parse( version_string, prerelease_token=translator.prerelease_token ) expected_tag = tag_format.format(version=version_string) actual_version_obj = translator.from_string(version_string) actual_tag = translator.str_to_tag(version_string) # These are important assumptions for formatting into source files/tags/etc assert version_string == str(actual_version_obj) assert expected_version_obj == actual_version_obj assert expected_tag == actual_tag assert expected_version_obj == 
(translator.from_tag(expected_tag) or "") assert version_string == str(translator.from_tag(actual_tag) or "") python-semantic-release-9.21.0/tests/unit/semantic_release/version/test_version.py000066400000000000000000000227621475670435200306000ustar00rootroot00000000000000import operator import random import pytest from semantic_release.enums import LevelBump from semantic_release.errors import InvalidVersion from semantic_release.version.version import Version random.seed(0) EXAMPLE_VERSION_STRINGS = [ "1.0.0", "0.1.0", "0.0.1", "1.2.3", "0.2.4", "2.6.15", "13.0.0", "4.26.3", "1.0.0-rc.1", "4.26.0-beta.3", "5.3.1+local.123456", "9.22.0-alpha.4+build.9999", ] @pytest.mark.parametrize( "version_parts", # Major, minor, patch, prerelease_token, prerelease_revision, build_metadata [ (1, 0, 0, "rc", None, ""), (0, 1, 0, "rc", None, ""), (0, 0, 1, "rc", None, ""), (1, 2, 3, "rc", None, ""), (0, 2, 4, "rc", None, ""), (2, 6, 15, "rc", None, ""), (13, 0, 0, "rc", None, ""), (4, 26, 3, "rc", None, ""), (1, 0, 0, "rc", 1, ""), (4, 26, 3, "beta", 3, ""), (5, 3, 1, "rc", None, "local.123456"), (9, 22, 0, "alpha", 4, "build.9999"), (17, 0, 3, "custom-token", 12, ""), (17, 0, 3, "custom-token-3-6-9", 12, ""), (17, 0, 3, "custom-token", 12, "build.9999"), ], ) def test_version_parse_succeeds(version_parts): full = f"{version_parts[0]}.{version_parts[1]}.{version_parts[2]}" prerelease = f"-{version_parts[3]}.{version_parts[4]}" if version_parts[4] else "" build_metadata = f"+{version_parts[5]}" if version_parts[5] else "" version_str = f"{full}{prerelease}{build_metadata}" version = Version.parse(version_str) assert version.major == version_parts[0] assert version.minor == version_parts[1] assert version.patch == version_parts[2] assert version.prerelease_token == version_parts[3] assert version.prerelease_revision == version_parts[4] assert version.build_metadata == version_parts[5] assert str(version) == version_str @pytest.mark.parametrize( "bad_version", [ "v1.2.3", "2.3", 
"2.1.dev0", "2.1.4.post5", "alpha-1.2.3", "17.0.3-custom_token.12", "9", "4.1.2!-major", "%.*.?", "M2.m3.p1", ], ) def test_version_parse_fails(bad_version): with pytest.raises(InvalidVersion, match=f"{bad_version!r}"): Version.parse(bad_version) @pytest.fixture(params=EXAMPLE_VERSION_STRINGS) def a_version(request): return Version.parse(request.param) @pytest.mark.parametrize( "bad_format", ["non_unique_format", "case_sensitive_{Version}", "typo_{versione}"] ) def test_tag_format_must_contain_version_field(a_version, bad_format): with pytest.raises(ValueError, match=f"Invalid tag_format {bad_format!r}"): a_version.tag_format = bad_format @pytest.mark.parametrize( "tag_format", [ "v{version}", "dev-{version}", "release-_-{version}", "{version}-final", "{version}-demo-{version}", ], ) def test_change_tag_format_updates_as_tag_method(a_version, tag_format): a_version.tag_format = tag_format assert a_version.as_tag() == tag_format.format(version=str(a_version)) @pytest.mark.parametrize( "version_str, is_prerelease", [ ("1.0.0", False), ("14.33.10", False), ("2.1.1-rc.1", True), ("65.1.2-alpha.4", True), ("17.0.3-custom-token.12", True), ("17.0.3-custom-token.12+20220101000000", True), ("4.2.4+zzzz9000", False), ], ) def test_version_prerelease(version_str, is_prerelease): assert Version.parse(version_str).is_prerelease == is_prerelease def test_version_eq_succeeds(a_version): assert a_version == a_version assert a_version == str(a_version) @pytest.mark.parametrize( "lower_version, upper_version", [ ("1.0.0", "1.0.1"), ("1.0.0", "1.1.0"), ("1.0.0", "1.1.1"), ("1.0.0", "2.0.0"), ("1.0.0-rc.1", "1.0.0"), ("1.0.0-rc.1", "1.0.0-rc.2"), ("1.0.0-alpha.1", "1.0.1-beta.1"), ("1.0.1", "2.0.0-rc.1"), ], ) @pytest.mark.parametrize( "op", [ operator.lt, operator.le, operator.ne, lambda left, right: left < right, lambda left, right: left <= right, lambda left, right: left != right, ], ) def test_version_comparator_succeeds(lower_version, upper_version, op): left = 
Version.parse(lower_version) right = Version.parse(upper_version) # Test both on Version $op string and on Version $op Version assert op(left, right) assert op(left, str(right)) @pytest.mark.parametrize( "bad_input", [ 5, "foo-4.22", ["a", list, "of", 5, ("things",)], (1, 2, 3), {"foo": 12}, "v2.3.4", ], ) @pytest.mark.parametrize( "op", [ operator.lt, operator.le, operator.gt, operator.ge, ], ) def test_version_comparator_typeerror(bad_input, op): with pytest.raises(TypeError): op(Version.parse("1.4.5"), bad_input) def test_version_equality(a_version): assert a_version == Version.parse(str(a_version)) @pytest.mark.parametrize( "left, right", [("1.2.3+local.3", "1.2.3"), ("2.1.1-rc.1+build.7777", "2.1.1-rc.1")] ) def test_version_equality_when_build_metadata_lost(left, right): assert Version.parse(left) == Version.parse(right) @pytest.mark.parametrize( "lower_version, upper_version, level", [ ("1.0.0", "1.0.1", LevelBump.PATCH), ("1.0.0", "1.1.0", LevelBump.MINOR), ("1.0.0", "1.1.1", LevelBump.MINOR), ("1.0.0", "2.0.0", LevelBump.MAJOR), ("1.0.0-rc.1", "1.0.0", LevelBump.PRERELEASE_REVISION), ("1.0.1", "1.1.0-rc.1", LevelBump.MINOR), ("1.0.0-rc.1", "1.0.0-rc.2", LevelBump.PRERELEASE_REVISION), ("1.0.0-alpha.1", "1.0.1-beta.1", LevelBump.PATCH), ("1.0.1", "2.0.0-rc.1", LevelBump.MAJOR), ], ) def test_version_difference(lower_version, upper_version, level): left = Version.parse(lower_version) right = Version.parse(upper_version) assert (left - right) is level assert (right - left) is level @pytest.mark.parametrize( "bad_input", [ 5, "foo-4.22", ["a", list, "of", 5, ("things",)], (1, 2, 3), {"foo": 12}, "v2.3.4", ], ) def test_unimplemented_version_diff(bad_input): with pytest.raises(TypeError, match=r"unsupported operand type"): Version.parse("1.2.3") - bad_input @pytest.mark.parametrize( "current_version, prerelease_token, expected_prerelease_version", [ ("1.2.3", "rc", "1.2.3-rc.1"), ("1.1.1-rc.2", "rc", "1.1.1-rc.2"), ("2.0.0", "beta", "2.0.0-beta.1"), 
("2.0.0-beta.1", "beta", "2.0.0-beta.1"), ], ) def test_version_to_prerelease_defaults( current_version, prerelease_token, expected_prerelease_version ): assert Version.parse(current_version).to_prerelease( token=prerelease_token ) == Version.parse(expected_prerelease_version) @pytest.mark.parametrize( "current_version, prerelease_token, revision, expected_prerelease_version", [ ("1.2.3", "rc", 3, "1.2.3-rc.3"), ("1.1.1-rc.1", "rc", 3, "1.1.1-rc.3"), ("2.0.0", "beta", None, "2.0.0-beta.1"), ("2.0.0-beta.1", "beta", 4, "2.0.0-beta.4"), ], ) def test_version_to_prerelease_with_params( current_version, prerelease_token, revision, expected_prerelease_version ): assert Version.parse(current_version).to_prerelease( token=prerelease_token, revision=revision ) == Version.parse(expected_prerelease_version) @pytest.mark.parametrize( "current_version, expected_final_version", [ ("1.2.3-rc.1", "1.2.3"), ("1.2.3", "1.2.3"), ("1.1.1-rc.2", "1.1.1"), ("2.0.0-beta.1", "2.0.0"), ("2.27.0", "2.27.0"), ], ) def test_version_finalize_version(current_version, expected_final_version): v1 = Version.parse(current_version) assert v1.finalize_version() == Version.parse( expected_final_version, prerelease_token=v1.prerelease_token ) @pytest.mark.parametrize( "current_version, level, new_version", [ ("1.2.3", LevelBump.NO_RELEASE, "1.2.3"), ("1.2.3", LevelBump.PRERELEASE_REVISION, "1.2.3-rc.1"), ("1.2.3", LevelBump.PATCH, "1.2.4"), ("1.2.3", LevelBump.MINOR, "1.3.0"), ("1.2.3", LevelBump.MAJOR, "2.0.0"), ("1.2.3-rc.1", LevelBump.NO_RELEASE, "1.2.3-rc.1"), ("1.2.3-rc.1", LevelBump.PRERELEASE_REVISION, "1.2.3-rc.2"), ("1.2.3-rc.1", LevelBump.PATCH, "1.2.4-rc.1"), ("1.2.3-rc.1", LevelBump.MINOR, "1.3.0-rc.1"), ("1.2.3-rc.1", LevelBump.MAJOR, "2.0.0-rc.1"), ], ) def test_version_bump_succeeds(current_version, level, new_version): cv = Version.parse(current_version) nv = cv.bump(level) assert nv == Version.parse(new_version) assert cv + level == Version.parse(new_version) 
@pytest.mark.parametrize("bad_level", [5, "patch", {"major": True}, [1, 1, 0, 0, 1], 1]) def test_version_bump_typeerror(bad_level): with pytest.raises(TypeError): Version.parse("1.2.3").bump(bad_level) def test_version_hashable(a_version): _ = {a_version: 4} assert True # NOTE: this might be a really good first candidate for hypothesis @pytest.mark.parametrize( "major, minor, patch, prerelease_revision", [tuple(random.choice(range(1, 100)) for _ in range(4)) for _ in range(10)], ) def test_prerelease_always_less_than_full(major, minor, patch, prerelease_revision): full = Version(major, minor, patch) pre = Version(major, minor, patch, prerelease_revision=prerelease_revision) assert pre < full python-semantic-release-9.21.0/tests/util.py000066400000000000000000000225701475670435200210570ustar00rootroot00000000000000from __future__ import annotations import importlib.util import os import secrets import shutil import stat import string from contextlib import contextmanager, suppress from pathlib import Path from textwrap import indent from typing import TYPE_CHECKING, Tuple from git import Git, Repo from pydantic.dataclasses import dataclass from semantic_release.changelog.context import ChangelogMode, make_changelog_context from semantic_release.changelog.release_history import ReleaseHistory from semantic_release.commit_parser._base import CommitParser, ParserOptions from semantic_release.commit_parser.conventional import ConventionalCommitParser from semantic_release.commit_parser.token import ( ParsedCommit, ParsedMessageResult, ParseError, ParseResult, ) from semantic_release.enums import LevelBump from tests.const import SUCCESS_EXIT_CODE if TYPE_CHECKING: import filecmp from typing import Any, Callable, Generator, Iterable, TypeVar try: # Python 3.8 and 3.9 compatibility from typing_extensions import TypeAlias except ImportError: from typing import TypeAlias # type: ignore[attr-defined, no-redef] from unittest.mock import MagicMock from click.testing import 
Result as ClickInvokeResult from git import Commit from semantic_release.cli.config import RuntimeContext _R = TypeVar("_R") GitCommandWrapperType: TypeAlias = Git def get_func_qual_name(func: Callable) -> str: return str.join(".", filter(None, [func.__module__, func.__qualname__])) def assert_exit_code( exit_code: int, result: ClickInvokeResult, cli_cmd: list[str] ) -> bool: if result.exit_code != exit_code: raise AssertionError( str.join( os.linesep, [ f"{result.exit_code} != {exit_code} (actual != expected)", "", # Explain what command failed "Unexpected exit code from command:", # f" '{str.join(' ', cli_cmd)}'", indent(f"'{str.join(' ', cli_cmd)}'", " " * 2), "", # Add indentation to each line for stdout & stderr "stdout:", indent(result.stdout, " " * 2), "stderr:", indent(result.stderr, " " * 2), ], ) ) return True def assert_successful_exit_code(result: ClickInvokeResult, cli_cmd: list[str]) -> bool: return assert_exit_code(SUCCESS_EXIT_CODE, result, cli_cmd) def get_full_qualname(callable_obj: Callable) -> str: parts = filter( None, [ callable_obj.__module__, ( None if callable_obj.__class__.__name__ == "function" else callable_obj.__class__.__name__ ), callable_obj.__name__, ], ) return str.join(".", parts) def copy_dir_tree(src_dir: Path | str, dst_dir: Path | str) -> None: """Compatibility wrapper for shutil.copytree""" # python3.8+ shutil.copytree( src=str(src_dir), dst=str(dst_dir), dirs_exist_ok=True, ) def remove_dir_tree(directory: Path | str = ".", force: bool = False) -> None: """ Compatibility wrapper for shutil.rmtree Helpful for deleting directories with .git/* files, which usually have some read-only permissions """ def on_read_only_error(_func, path, _exc_info): os.chmod(path, stat.S_IWRITE) os.unlink(path) # Prevent error if already deleted or never existed, that is our desired state with suppress(FileNotFoundError): shutil.rmtree(str(directory), onerror=on_read_only_error if force else None) def dynamic_python_import(file_path: Path, 
module_name: str): spec = importlib.util.spec_from_file_location(module_name, str(file_path)) module = importlib.util.module_from_spec(spec) # type: ignore[arg-type] spec.loader.exec_module(module) # type: ignore[union-attr] return module @contextmanager def temporary_working_directory(directory: Path | str) -> Generator[None, None, None]: cwd = os.getcwd() os.chdir(str(directory)) try: yield finally: os.chdir(cwd) def shortuid(length: int = 8) -> str: alphabet = string.ascii_lowercase + string.digits return "".join(secrets.choice(alphabet) for _ in range(length)) def add_text_to_file(repo: Repo, filename: str, text: str | None = None): """Makes a deterministic file change for testing""" tgt_file = Path(repo.working_tree_dir or ".") / filename tgt_file.parent.mkdir(parents=True, exist_ok=True) file_contents = tgt_file.read_text() if tgt_file.exists() else "" line_number = len(file_contents.splitlines()) file_contents += f"{line_number} {text or 'default text'}{os.linesep}" tgt_file.write_text(file_contents, encoding="utf-8") repo.index.add(filename) def flatten_dircmp(dcmp: filecmp.dircmp) -> list[str]: return ( dcmp.diff_files + dcmp.left_only + dcmp.right_only + [ os.sep.join((directory, file)) for directory, cmp in dcmp.subdirs.items() for file in flatten_dircmp(cmp) ] ) def xdist_sort_hack(it: Iterable[_R]) -> Iterable[_R]: """ hack for pytest-xdist https://pytest-xdist.readthedocs.io/en/latest/known-limitations.html#workarounds taking an iterable of params for a pytest.mark.parametrize decorator, this ensures a deterministic sort so that xdist can always work Being able to use `pytest -nauto` is a huge speedup on testing """ return dict(enumerate(it)).values() def actions_output_to_dict(output: str) -> dict[str, str]: return {line.split("=")[0]: line.split("=")[1] for line in output.splitlines()} def get_release_history_from_context(runtime_context: RuntimeContext) -> ReleaseHistory: with Repo(str(runtime_context.repo_dir)) as git_repo: release_history = 
ReleaseHistory.from_git_history( git_repo, runtime_context.version_translator, runtime_context.commit_parser, runtime_context.changelog_excluded_commit_patterns, ) changelog_context = make_changelog_context( hvcs_client=runtime_context.hvcs_client, release_history=release_history, mode=ChangelogMode.INIT, prev_changelog_file=Path("CHANGELOG.md"), insertion_flag="", mask_initial_release=runtime_context.changelog_mask_initial_release, ) changelog_context.bind_to_environment(runtime_context.template_environment) return release_history def prepare_mocked_git_command_wrapper_type( **mocked_methods: MagicMock, ) -> type[GitCommandWrapperType]: """ Mock the specified methods of `Repo.GitCommandWrapperType` (`git.Git` by default). Initialized `MagicMock` objects are passed as keyword arguments, where the argument name is the name of the method to mock. For example, the following invocation mocks the `Repo.git.push()` command / method. Arrange: >>> from unittest.mock import MagicMock >>> from git import Repo >>> mocked_push = MagicMock() >>> cls = prepare_mocked_git_command_wrapper_type(push=mocked_push) >>> Repo.GitCommandWrapperType = cls >>> repo = Repo(".") Act: >>> repo.git.push("origin", "master") Assert: >>> mocked_push.assert_called_once() """ class MockGitCommandWrapperType(Git): def __getattr__(self, name: str) -> Any: try: return object.__getattribute__(self, f"mocked_{name}") except AttributeError: return super().__getattr__(name) for name, method in mocked_methods.items(): setattr(MockGitCommandWrapperType, f"mocked_{name}", method) return MockGitCommandWrapperType class CustomParserWithNoOpts(CommitParser[ParseResult, ParserOptions]): def parse(self, commit: Commit) -> ParsedCommit | ParseError: return ParsedCommit( bump=LevelBump.NO_RELEASE, type="", scope="", descriptions=[], breaking_descriptions=[], commit=commit, ) @dataclass class CustomParserOpts(ParserOptions): allowed_tags: Tuple[str, ...] 
= ("new", "custom") # noqa: UP006 class CustomParserWithOpts(CommitParser[ParseResult, CustomParserOpts]): parser_options = CustomParserOpts def parse(self, commit: Commit) -> ParsedCommit | ParseError: return ParsedCommit( bump=LevelBump.NO_RELEASE, type="custom", scope="", descriptions=[], breaking_descriptions=[], commit=commit, ) class IncompleteCustomParser(CommitParser): pass class CustomConventionalParserWithIgnorePatterns(ConventionalCommitParser): def parse(self, commit: Commit) -> ParsedCommit | ParseError: if not (parse_msg_result := super().parse_message(str(commit.message))): return ParseError(commit, "Unable to parse commit") return ParsedCommit.from_parsed_message_result( commit, ParsedMessageResult( **{ **parse_msg_result._asdict(), "include_in_changelog": bool( not str(commit.message).startswith("chore") ), } ), )